You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@bigtop.apache.org by of...@apache.org on 2020/09/19 19:57:27 UTC

[bigtop] 01/01: Website updated to 42ed1fa0

This is an automated email from the ASF dual-hosted git repository.

oflebbe pushed a commit to branch asf-staging
in repository https://gitbox.apache.org/repos/asf/bigtop.git

commit e4febc22962d142ef4325350cec495460dd386c0
Author: Olaf Flebbe <of...@oflebbe.de>
AuthorDate: Sat Sep 19 21:55:27 2020 +0200

    Website updated to 42ed1fa0
---
 BUILDING.txt                                       |    104 -
 CHANGES.txt                                        |   2722 -
 LICENSE                                            |    202 -
 MAINTAINERS.txt                                    |     30 -
 NOTICE                                             |     14 -
 README.md                                          |    271 -
 bigtop-bigpetstore/README.md                       |     49 -
 .../bigpetstore-mapreduce/BPS_analytics.pig        |     79 -
 bigtop-bigpetstore/bigpetstore-mapreduce/README.md |    201 -
 bigtop-bigpetstore/bigpetstore-mapreduce/arch.dot  |     41 -
 .../bigpetstore-mapreduce/build.gradle             |    305 -
 bigtop-bigpetstore/bigpetstore-mapreduce/pom.xml   |    584 -
 .../bigpetstore-mapreduce/settings.gradle          |     18 -
 .../bigtop/bigpetstore/BigPetStoreMahoutIT.java    |     74 -
 .../bigtop/bigpetstore/BigPetStorePigIT.java       |    101 -
 .../org/apache/bigtop/bigpetstore/ITUtils.java     |    169 -
 .../bigpetstore/contract/PetStoreStatistics.java   |     34 -
 .../apache/bigtop/bigpetstore/etl/CrunchETL.java   |    142 -
 .../apache/bigtop/bigpetstore/etl/LineItem.java    |    112 -
 .../bigtop/bigpetstore/etl/PigCSVCleaner.java      |    156 -
 .../bigtop/bigpetstore/generator/BPSGenerator.java |    108 -
 .../bigpetstore/generator/CustomerGenerator.scala  |     97 -
 .../bigpetstore/generator/PetStoreTransaction.java |     32 -
 .../generator/PetStoreTransactionInputSplit.java   |     73 -
 .../generator/PetStoreTransactionsInputFormat.java |    139 -
 .../bigtop/bigpetstore/generator/util/Product.java |     80 -
 .../bigpetstore/generator/util/ProductType.java    |     46 -
 .../bigtop/bigpetstore/generator/util/State.java   |     43 -
 .../bigpetstore/recommend/ItemRecommender.scala    |    121 -
 .../bigpetstore/util/BigPetStoreConstants.java     |     41 -
 .../bigtop/bigpetstore/util/DeveloperTools.java    |     58 -
 .../bigtop/bigpetstore/util/NumericalIdUtils.java  |     48 -
 .../bigpetstore/util/PetStoreParseFunctions.java   |     55 -
 .../bigtop/bigpetstore/util/StringUtils.java       |     53 -
 .../bigtop/bigpetstore/generator/DataForger.scala  |    280 -
 .../generator/TransactionIteratorFactory.scala     |    106 -
 .../apache/bigtop/bigpetstore/docs/TestDocs.java   |     37 -
 .../generator/TestNumericalIdUtils.java            |     35 -
 .../TestPetStoreTransactionGeneratorJob.java       |    104 -
 .../src/test/resources/log4j.properties            |     47 -
 .../bigtop/bigpetstore/ScalaTestSample.scala       |     35 -
 bigtop-bigpetstore/bigpetstore-spark/README.md     |    175 -
 bigtop-bigpetstore/bigpetstore-spark/arch.dot      |     33 -
 bigtop-bigpetstore/bigpetstore-spark/build.gradle  |    146 -
 .../spark/analytics/PetStoreStatistics.scala       |    182 -
 .../spark/analytics/RecommendProducts.scala        |    118 -
 .../bigpetstore/spark/datamodel/DataModel.scala    |     78 -
 .../bigpetstore/spark/datamodel/IOUtils.scala      |    132 -
 .../org/apache/bigpetstore/spark/etl/ETL.scala     |    222 -
 .../bigpetstore/spark/generator/SparkDriver.scala  |    251 -
 .../bigpetstore/spark/TestFullPipeline.scala       |    103 -
 .../spark/analytics/AnalyticsSuite.scala           |     43 -
 .../bigpetstore/spark/datamodel/IOUtilsSuite.scala |     97 -
 .../apache/bigpetstore/spark/etl/ETLSuite.scala    |    164 -
 .../spark/generator/SparkDriverSuite.scala         |     72 -
 .../bigpetstore-transaction-queue/.dockerignore    |      3 -
 .../bigpetstore-transaction-queue/Dockerfile       |     29 -
 .../bigpetstore-transaction-queue/README.md        |    180 -
 .../bigpetstore-transaction-queue/build.gradle     |     62 -
 .../bigpetstore-transaction-queue/settings.gradle  |     16 -
 .../bigtop/bigpetstore/qstream/FileLoadGen.java    |     75 -
 .../bigtop/bigpetstore/qstream/HttpLoadGen.java    |     86 -
 .../apache/bigtop/bigpetstore/qstream/LoadGen.java |    175 -
 .../bigtop/bigpetstore/qstream/LoadGenFactory.java |    106 -
 .../bigpetstore/qstream/SimpleHttpServer.java      |    128 -
 .../apache/bigtop/bigpetstore/qstream/Utils.java   |     96 -
 .../bigtop/bigpetstore/qstream/TestLoadGen.java    |     88 -
 bigtop-ci/build.sh                                 |     99 -
 bigtop-ci/entrypoint.sh                            |     18 -
 bigtop-ci/jenkins/README                           |     31 -
 bigtop-ci/jenkins/jobsCreator.groovy               |    250 -
 bigtop-data-generators/README.md                   |     52 -
 .../bigpetstore-data-generator/README.md           |     55 -
 .../bigpetstore-data-generator/build.gradle        |     39 -
 .../MonteCarloExponentialSamplingExample.groovy    |     44 -
 .../MonteCarloGaussianSamplingExample.groovy       |     45 -
 .../bigpetstore-data-generator/settings.gradle     |     16 -
 .../datagenerators/bigpetstore/Constants.java      |     92 -
 .../bigpetstore/CustomerGenerator.java             |     41 -
 .../datagenerators/bigpetstore/DataLoader.java     |     33 -
 .../bigpetstore/ProductGenerator.java              |     50 -
 .../bigpetstore/PurchasingModelGenerator.java      |     40 -
 .../datagenerators/bigpetstore/StoreGenerator.java |     38 -
 .../bigpetstore/TransactionGenerator.java          |     43 -
 .../datagenerators/bigpetstore/cli/Driver.java     |    315 -
 .../datagenerators/bigpetstore/cli/Simulation.java |    188 -
 .../bigpetstore/datamodels/Customer.java           |     59 -
 .../bigpetstore/datamodels/PetSpecies.java         |     22 -
 .../bigpetstore/datamodels/Product.java            |     96 -
 .../bigpetstore/datamodels/Store.java              |     51 -
 .../bigpetstore/datamodels/Transaction.java        |     68 -
 .../bigpetstore/datamodels/inputs/InputData.java   |     39 -
 .../datamodels/inputs/ProductCategory.java         |    108 -
 .../generators/customer/CustomerLocationPDF.java   |     69 -
 .../generators/customer/CustomerSampler.java       |     54 -
 .../customer/CustomerSamplerBuilder.java           |     81 -
 .../generators/customer/CustomerStorePDF.java      |     41 -
 .../products/ProductBuilderIterator.java           |     80 -
 .../products/ProductCategoryBuilder.java           |    195 -
 .../generators/products/ProductFieldValue.java     |     45 -
 .../generators/products/ProductFilterIterator.java |     72 -
 .../generators/products/ProductIterator.java       |     78 -
 .../products/cartesian/CartesianProduct.java       |     24 -
 .../products/cartesian/CartesianProductBase.java   |     56 -
 .../products/cartesian/CartesianProductField.java  |     79 -
 .../collections/MediumProductCollection.java       |    275 -
 .../collections/SmallProductCollection.java        |    162 -
 .../generators/products/rules/AlwaysTrueRule.java  |     29 -
 .../generators/products/rules/AndRule.java         |     50 -
 .../generators/products/rules/FieldPredicate.java  |     48 -
 .../generators/products/rules/NotRule.java         |     36 -
 .../generators/products/rules/OrRule.java          |     43 -
 .../generators/products/rules/Rule.java            |     23 -
 .../MarkovModelProductCategorySampler.java         |    119 -
 .../generators/purchase/MarkovPurchasingModel.java |     65 -
 .../purchase/MarkovPurchasingModelSampler.java     |     47 -
 .../purchase/MultinomialPurchasingModel.java       |     67 -
 .../MultinomialPurchasingModelSampler.java         |    143 -
 .../generators/purchase/PurchasingModel.java       |     31 -
 .../purchase/PurchasingModelSamplerBuilder.java    |    108 -
 .../generators/purchase/PurchasingProcesses.java   |     39 -
 .../generators/store/StoreLocationIncomePDF.java   |     65 -
 .../store/StoreLocationPopulationPDF.java          |     43 -
 .../bigpetstore/generators/store/StoreSampler.java |     45 -
 .../generators/store/StoreSamplerBuilder.java      |     57 -
 .../transaction/CategoryWeightFunction.java        |     39 -
 .../generators/transaction/CustomerInventory.java  |     65 -
 .../transaction/CustomerInventoryBuilder.java      |     67 -
 .../transaction/CustomerTransactionParameters.java |     73 -
 .../CustomerTransactionParametersBuilder.java      |     58 -
 .../CustomerTransactionParametersSampler.java      |     61 -
 ...ustomerTransactionParametersSamplerBuilder.java |     55 -
 .../transaction/ProductCategoryInventory.java      |     58 -
 .../transaction/ProductCategoryUsageSimulator.java |     72 -
 .../ProductCategoryUsageTrajectory.java            |     74 -
 .../transaction/ProposedPurchaseTimeSampler.java   |     49 -
 .../TransactionPurchasesHiddenMarkovModel.java     |    108 -
 .../TransactionPurchasesSamplerBuilder.java        |     70 -
 .../generators/transaction/TransactionSampler.java |     56 -
 .../transaction/TransactionSamplerBuilder.java     |     95 -
 .../generators/transaction/TransactionTimePDF.java |     31 -
 .../transaction/TransactionTimeSamplerBuilder.java |     56 -
 .../bigpetstore/datamodels/TestProduct.java        |     77 -
 .../customer/TestCustomerLocationPDF.java          |     57 -
 .../generators/customer/TestCustomerSampler.java   |    108 -
 .../customer/TestCustomerSamplerBuilder.java       |     67 -
 .../cartesian/TestCartesianProductBase.java        |     57 -
 .../cartesian/TestCartesianProductField.java       |     74 -
 .../products/rules/TestAlwaysTrueRule.java         |     32 -
 .../generators/products/rules/TestAndRule.java     |     55 -
 .../products/rules/TestFieldPredicate.java         |     44 -
 .../generators/products/rules/TestNotRule.java     |     39 -
 .../generators/products/rules/TestOrRule.java      |     55 -
 .../TestProductCategoryMarkovModelSampler.java     |    108 -
 .../purchase/TestPurchasingModelSampler.java       |    100 -
 .../TestPurchasingModelSamplerBuilder.java         |    100 -
 .../purchase/TestPurchasingProcesses.java          |     70 -
 .../store/TestStoreLocationIncomePDF.java          |     48 -
 .../store/TestStoreLocationPopulationPDF.java      |     48 -
 .../generators/store/TestStoreSampler.java         |     58 -
 .../generators/store/TestStoreSamplerBuilder.java  |     58 -
 .../transaction/TestCustomerInventory.java         |     94 -
 .../transaction/TestCustomerInventoryBuilder.java  |     91 -
 .../TestCustomerTransactionParameters.java         |     69 -
 .../TestCustomerTransactionParametersBuilder.java  |     48 -
 .../TestCustomerTransactionParametersSampler.java  |     47 -
 ...ustomerTransactionParametersSamplerBuilder.java |     47 -
 .../transaction/TestProductCategoryInventory.java  |     75 -
 .../TestProductCategoryUsageSimulator.java         |     53 -
 .../TestProductCategoryUsageTrajectory.java        |     70 -
 .../TestTransactionPurchasesHiddenMarkovModel.java |    205 -
 .../transaction/TestTransactionTimePDF.java        |     35 -
 .../bigtop-location-data/README.md                 |     26 -
 .../bigtop-location-data/build.gradle              |     34 -
 .../bigtop-location-data/settings.gradle           |     16 -
 .../bigtop/datagenerators/locations/Location.java  |     90 -
 .../locations/LocationConstants.java               |     25 -
 .../datagenerators/locations/LocationReader.java   |    172 -
 .../ACS_12_5YR_S1903/ACS_12_5YR_S1903.txt          |     33 -
 .../ACS_12_5YR_S1903/ACS_12_5YR_S1903_metadata.csv |    123 -
 .../ACS_12_5YR_S1903/ACS_12_5YR_S1903_with_ann.csv |  33122 -----
 .../main/resources/input_data/population_data.csv  |  33090 -----
 .../src/main/resources/input_data/zips.csv         |  33179 -----
 .../bigtop-name-generator/README.md                |     27 -
 .../bigtop-name-generator/build.gradle             |     35 -
 .../bigtop-name-generator/settings.gradle          |     16 -
 .../namegenerator/NameGenerator.java               |     41 -
 .../datagenerators/namegenerator/NameReader.java   |     68 -
 .../bigtop/datagenerators/namegenerator/Names.java |     46 -
 .../main/resources/input_data/namedb/data/data.dat | 129036 ------------------
 .../main/resources/input_data/namedb/namedb.info   |     12 -
 .../namegenerator/TestNameGenerator.java           |     39 -
 bigtop-data-generators/bigtop-samplers/README.md   |     25 -
 .../bigtop-samplers/build.gradle                   |     35 -
 .../bigtop-samplers/settings.gradle                |     16 -
 .../datagenerators/samplers/SeedFactory.java       |     38 -
 .../samplers/markovmodels/MarkovModel.java         |     50 -
 .../samplers/markovmodels/MarkovModelBuilder.java  |     53 -
 .../samplers/markovmodels/MarkovProcess.java       |     68 -
 .../ConditionalProbabilityDensityFunction.java     |     21 -
 .../samplers/pdfs/ExponentialPDF.java              |     31 -
 .../datagenerators/samplers/pdfs/GaussianPDF.java  |     37 -
 .../datagenerators/samplers/pdfs/JointPDF.java     |     49 -
 .../samplers/pdfs/MultinomialPDF.java              |     57 -
 .../samplers/pdfs/ProbabilityDensityFunction.java  |     21 -
 .../datagenerators/samplers/pdfs/UniformPDF.java   |     36 -
 .../samplers/BoundedMultiModalGaussianSampler.java |     61 -
 .../samplers/samplers/ConditionalSampler.java      |     21 -
 .../samplers/samplers/DoubleSequenceSampler.java   |     70 -
 .../samplers/samplers/ExponentialSampler.java      |     37 -
 .../samplers/samplers/GammaSampler.java            |     34 -
 .../samplers/samplers/GaussianSampler.java         |     39 -
 .../samplers/samplers/MonteCarloSampler.java       |     55 -
 .../samplers/samplers/RouletteWheelSampler.java    |    111 -
 .../datagenerators/samplers/samplers/Sampler.java  |     21 -
 .../samplers/samplers/SequenceSampler.java         |     70 -
 .../samplers/StatefulMonteCarloSampler.java        |     60 -
 .../samplers/samplers/UniformIntSampler.java       |     43 -
 .../samplers/samplers/UniformSampler.java          |     46 -
 .../samplers/wfs/ConditionalWeightFunction.java    |     21 -
 .../samplers/wfs/DiscreteWeightFunction.java       |     27 -
 .../datagenerators/samplers/wfs/MultinomialWF.java |     67 -
 .../samplers/wfs/WeightFunction.java               |     21 -
 .../markovmodels/TestMarkovModelBuilder.java       |     76 -
 .../samplers/markovmodels/TestMarkovProcess.java   |     53 -
 .../samplers/pdfs/TestMultinomialPDF.java          |     42 -
 .../TestBoundedMultiModalGaussianSampler.java      |     50 -
 .../samplers/samplers/TestExponentialSampler.java  |     41 -
 .../samplers/samplers/TestGaussianSampler.java     |     43 -
 .../samplers/TestRouletteWheelSampler.java         |     71 -
 .../samplers/samplers/TestSequenceSampler.java     |     38 -
 .../samplers/samplers/TestUniformIntSampler.java   |     60 -
 bigtop-data-generators/bigtop-weatherman/README.md |     36 -
 .../bigtop-weatherman/build.gradle                 |     37 -
 .../bigtop-weatherman/settings.gradle              |     16 -
 .../weatherman/WeatherGenerator.java               |     67 -
 .../datagenerators/weatherman/WeatherRecord.java   |     77 -
 .../datagenerators/weatherman/internal/Driver.java |    194 -
 .../weatherman/internal/PrecipitationSampler.java  |     51 -
 .../weatherman/internal/TemperatureSampler.java    |     71 -
 .../weatherman/internal/WeatherConstants.java      |     53 -
 .../internal/WeatherParametersReader.java          |     80 -
 .../weatherman/internal/WeatherRecordBuilder.java  |    163 -
 .../weatherman/internal/WeatherSampler.java        |     50 -
 .../weatherman/internal/WeatherSamplerBuilder.java |     80 -
 .../internal/WeatherStationParameters.java         |    110 -
 .../weatherman/internal/WindSpeedSampler.java      |     74 -
 .../resources/input_data/weather_parameters.csv    |    897 -
 bigtop-data-generators/build.gradle                |     44 -
 bigtop-data-generators/settings.gradle             |     16 -
 bigtop-deploy/juju/hadoop-hbase/.gitignore         |      2 -
 bigtop-deploy/juju/hadoop-hbase/README.md          |    328 -
 bigtop-deploy/juju/hadoop-hbase/bundle-local.yaml  |    142 -
 bigtop-deploy/juju/hadoop-hbase/bundle.yaml        |    142 -
 bigtop-deploy/juju/hadoop-hbase/ci-info.yaml       |     34 -
 bigtop-deploy/juju/hadoop-hbase/copyright          |     16 -
 bigtop-deploy/juju/hadoop-hbase/tests/01-bundle.py |    137 -
 bigtop-deploy/juju/hadoop-hbase/tests/tests.yaml   |     13 -
 bigtop-deploy/juju/hadoop-kafka/.gitignore         |      2 -
 bigtop-deploy/juju/hadoop-kafka/README.md          |    322 -
 bigtop-deploy/juju/hadoop-kafka/bundle-local.yaml  |    165 -
 bigtop-deploy/juju/hadoop-kafka/bundle.yaml        |    165 -
 bigtop-deploy/juju/hadoop-kafka/ci-info.yaml       |     34 -
 bigtop-deploy/juju/hadoop-kafka/copyright          |     16 -
 bigtop-deploy/juju/hadoop-kafka/tests/01-bundle.py |    135 -
 bigtop-deploy/juju/hadoop-kafka/tests/tests.yaml   |     13 -
 bigtop-deploy/juju/hadoop-processing/.gitignore    |      2 -
 bigtop-deploy/juju/hadoop-processing/README.md     |    278 -
 .../juju/hadoop-processing/bundle-local.yaml       |    106 -
 bigtop-deploy/juju/hadoop-processing/bundle.yaml   |    106 -
 bigtop-deploy/juju/hadoop-processing/ci-info.yaml  |     26 -
 bigtop-deploy/juju/hadoop-processing/copyright     |     16 -
 .../juju/hadoop-processing/tests/01-bundle.py      |    121 -
 .../juju/hadoop-processing/tests/tests.yaml        |     13 -
 bigtop-deploy/juju/hadoop-spark/.gitignore         |      2 -
 bigtop-deploy/juju/hadoop-spark/README.md          |    322 -
 bigtop-deploy/juju/hadoop-spark/bundle-local.yaml  |    110 -
 bigtop-deploy/juju/hadoop-spark/bundle.yaml        |    108 -
 bigtop-deploy/juju/hadoop-spark/ci-info.yaml       |     34 -
 bigtop-deploy/juju/hadoop-spark/copyright          |     16 -
 bigtop-deploy/juju/hadoop-spark/tests/01-bundle.py |    137 -
 bigtop-deploy/juju/hadoop-spark/tests/tests.yaml   |     19 -
 bigtop-deploy/juju/spark-processing/README.md      |    234 -
 .../juju/spark-processing/bundle-local.yaml        |     80 -
 bigtop-deploy/juju/spark-processing/bundle.yaml    |     83 -
 bigtop-deploy/juju/spark-processing/ci-info.yaml   |     14 -
 bigtop-deploy/juju/spark-processing/copyright      |     16 -
 .../juju/spark-processing/tests/01-bundle.py       |     90 -
 .../juju/spark-processing/tests/tests.yaml         |     13 -
 bigtop-deploy/puppet/README.md                     |    216 -
 bigtop-deploy/puppet/hiera.yaml                    |      8 -
 bigtop-deploy/puppet/hieradata/bigtop/cluster.yaml |    216 -
 bigtop-deploy/puppet/hieradata/bigtop/ha.yaml      |      7 -
 bigtop-deploy/puppet/hieradata/bigtop/noha.yaml    |      2 -
 bigtop-deploy/puppet/hieradata/bigtop/repo.yaml    |      5 -
 bigtop-deploy/puppet/hieradata/site.yaml           |     46 -
 bigtop-deploy/puppet/manifests/bigtop_repo.pp      |    112 -
 bigtop-deploy/puppet/manifests/cluster.pp          |    296 -
 bigtop-deploy/puppet/manifests/jdk.pp              |     59 -
 bigtop-deploy/puppet/manifests/site.pp             |     43 -
 .../puppet/modules/alluxio/manifests/init.pp       |     81 -
 .../alluxio/templates/alluxio-site.properties      |     36 -
 .../puppet/modules/ambari/manifests/init.pp        |     68 -
 .../modules/ambari/templates/ambari-agent.ini      |     70 -
 bigtop-deploy/puppet/modules/bigtop-util/Gemfile   |     25 -
 .../puppet/modules/bigtop-util/Gemfile.lock        |     51 -
 bigtop-deploy/puppet/modules/bigtop-util/README.md |     38 -
 bigtop-deploy/puppet/modules/bigtop-util/Rakefile  |     18 -
 .../lib/puppet/parser/functions/get_roles.rb       |     63 -
 .../bigtop-util/spec/functions/get_roles_spec.rb   |     60 -
 .../puppet/modules/bigtop_utils/manifests/init.pp  |     29 -
 .../puppet/modules/bigtop_utils/tests/init.pp      |     17 -
 .../puppet/modules/elasticsearch/manifests/init.pp |     51 -
 .../elasticsearch/templates/elasticsearch.yml      |    102 -
 .../puppet/modules/flink/manifests/init.pp         |     68 -
 .../puppet/modules/flink/templates/flink-conf.yaml |     39 -
 .../puppet/modules/giraph/manifests/init.pp        |     34 -
 .../modules/giraph/templates/giraph-site.xml       |     29 -
 bigtop-deploy/puppet/modules/giraph/tests/init.pp  |     17 -
 bigtop-deploy/puppet/modules/gpdb/README.md        |     19 -
 .../puppet/modules/gpdb/manifests/init.pp          |    421 -
 .../puppet/modules/gpdb/templates/gp_dbid          |      3 -
 .../puppet/modules/gpdb/templates/gpssh.conf       |     12 -
 .../puppet/modules/gpdb/templates/init-db.sh       |      4 -
 .../templates/insert-to-faultStrategy-table.sh     |      2 -
 .../templates/insert-to-segmentConfig-table.sh     |      2 -
 .../puppet/modules/gpdb/templates/postmaster.opts  |      1 -
 .../templates/start-master-db-in-admin-mode.sh     |      2 -
 .../puppet/modules/gpdb/templates/stop-db.sh       |      2 -
 .../modules/gpdb/templates/test-master-db.sh       |      2 -
 bigtop-deploy/puppet/modules/gpdb/tests/init.pp    |     23 -
 .../puppet/modules/hadoop/files/hdfs/id_hdfsuser   |     27 -
 .../modules/hadoop/files/hdfs/id_hdfsuser.pub      |      1 -
 .../hadoop/lib/facter/hadoop_storage_dirs.rb       |     31 -
 .../hadoop/lib/facter/hadoop_storage_locations.rb  |     40 -
 .../puppet/modules/hadoop/manifests/init.pp        |    967 -
 .../hadoop/templates/container-executor.cfg        |      7 -
 .../puppet/modules/hadoop/templates/core-site.xml  |    179 -
 .../puppet/modules/hadoop/templates/hadoop-env.sh  |     98 -
 .../puppet/modules/hadoop/templates/hadoop-hdfs    |     26 -
 .../puppet/modules/hadoop/templates/hdfs-site.xml  |    357 -
 .../puppet/modules/hadoop/templates/httpfs-env.sh  |     41 -
 .../modules/hadoop/templates/httpfs-site.xml       |     70 -
 .../puppet/modules/hadoop/templates/kms-env.sh     |    100 -
 .../puppet/modules/hadoop/templates/kms-site.xml   |    181 -
 .../modules/hadoop/templates/mapred-site.xml       |    222 -
 .../modules/hadoop/templates/taskcontroller.cfg    |      5 -
 .../puppet/modules/hadoop/templates/yarn-site.xml  |    245 -
 .../puppet/modules/hadoop_flume/manifests/init.pp  |     43 -
 .../modules/hadoop_flume/templates/flume.conf      |     36 -
 .../puppet/modules/hadoop_flume/tests/init.pp      |     44 -
 .../puppet/modules/hadoop_hbase/manifests/init.pp  |    122 -
 .../modules/hadoop_hbase/templates/hbase-env.sh    |    140 -
 .../modules/hadoop_hbase/templates/hbase-site.xml  |     70 -
 .../modules/hadoop_hbase/templates/jaas.conf       |     24 -
 .../puppet/modules/hadoop_hbase/tests/init.pp      |     29 -
 .../puppet/modules/hadoop_hive/manifests/init.pp   |    121 -
 .../modules/hadoop_hive/templates/hive-site.xml    |    162 -
 .../puppet/modules/hadoop_hive/tests/init.pp       |     18 -
 .../puppet/modules/hadoop_oozie/manifests/init.pp  |     74 -
 .../modules/hadoop_oozie/templates/oozie-site.xml  |    333 -
 .../puppet/modules/hadoop_oozie/tests/init.pp      |     17 -
 .../puppet/modules/hadoop_zookeeper/files/java.env |     16 -
 .../modules/hadoop_zookeeper/manifests/init.pp     |    120 -
 .../hadoop_zookeeper/templates/client-jaas.conf    |     22 -
 .../hadoop_zookeeper/templates/server-jaas.conf    |     25 -
 .../modules/hadoop_zookeeper/templates/zoo.cfg     |     61 -
 .../puppet/modules/hadoop_zookeeper/tests/init.pp  |     20 -
 .../puppet/modules/hcatalog/manifests/init.pp      |     65 -
 .../modules/hcatalog/templates/hcatalog-server     |     23 -
 .../puppet/modules/hcatalog/templates/webhcat.xml  |    228 -
 .../puppet/modules/hcatalog/tests/init.pp          |     17 -
 .../puppet/modules/ignite_hadoop/manifests/init.pp |     83 -
 .../modules/ignite_hadoop/templates/core-site.xml  |     65 -
 .../ignite_hadoop/templates/default-config.xml     |    191 -
 .../modules/ignite_hadoop/templates/hive-site.xml  |     33 -
 .../modules/ignite_hadoop/templates/ignite-hadoop  |     21 -
 .../ignite_hadoop/templates/mapred-site.xml        |     63 -
 .../puppet/modules/ignite_hadoop/tests/init.pp     |     16 -
 .../puppet/modules/kafka/manifests/init.pp         |     58 -
 .../modules/kafka/templates/server.properties      |    138 -
 .../modules/kerberos/lib/facter/kadm_keytab.rb     |     21 -
 .../puppet/modules/kerberos/manifests/init.pp      |    231 -
 .../puppet/modules/kerberos/templates/kadm5.acl    |     21 -
 .../puppet/modules/kerberos/templates/kdc.conf     |     35 -
 .../puppet/modules/kerberos/templates/krb5.conf    |     41 -
 .../puppet/modules/kerberos/tests/init.pp          |     31 -
 .../puppet/modules/kibana/manifests/init.pp        |     46 -
 .../kibana/templates/kibana.example.org.crt        |     24 -
 .../kibana/templates/kibana.example.org.key        |     28 -
 .../puppet/modules/kibana/templates/kibana.yml     |    105 -
 .../puppet/modules/kibana/templates/start-kibana   |    138 -
 .../puppet/modules/livy/manifests/init.pp          |     55 -
 .../puppet/modules/livy/templates/livy-env.sh      |     37 -
 .../puppet/modules/livy/templates/livy.conf        |    141 -
 .../puppet/modules/livy/templates/log4j.properties |     24 -
 .../puppet/modules/logstash/manifests/init.pp      |     28 -
 .../puppet/modules/mahout/manifests/init.pp        |     32 -
 bigtop-deploy/puppet/modules/mahout/tests/init.pp  |     17 -
 .../puppet/modules/nfs/manifests/client.pp         |     19 -
 .../puppet/modules/nfs/manifests/client/install.pp |     20 -
 .../puppet/modules/nfs/manifests/client/params.pp  |     47 -
 .../puppet/modules/nfs/manifests/client/service.pp |     34 -
 bigtop-deploy/puppet/modules/nfs/manifests/init.pp |     18 -
 bigtop-deploy/puppet/modules/qfs/README.md         |     80 -
 bigtop-deploy/puppet/modules/qfs/manifests/init.pp |    176 -
 .../puppet/modules/qfs/templates/ChunkServer.prp   |    280 -
 .../puppet/modules/qfs/templates/MetaServer.prp    |   1179 -
 .../puppet/modules/qfs/templates/QfsClient.prp     |    137 -
 .../puppet/modules/qfs/templates/hadoop-qfs        |     79 -
 .../puppet/modules/solr/manifests/init.pp          |     65 -
 .../puppet/modules/solr/templates/jaas.conf        |     24 -
 bigtop-deploy/puppet/modules/solr/templates/solr   |     33 -
 bigtop-deploy/puppet/modules/solr/tests/init.pp    |     22 -
 .../puppet/modules/spark/manifests/init.pp         |    192 -
 .../modules/spark/templates/spark-defaults.conf    |     38 -
 .../puppet/modules/spark/templates/spark-env.sh    |     42 -
 bigtop-deploy/puppet/modules/spark/tests/init.pp   |     25 -
 .../puppet/modules/sqoop/manifests/init.pp         |     22 -
 bigtop-deploy/puppet/modules/sqoop/tests/init.pp   |     16 -
 .../puppet/modules/sqoop2/manifests/init.pp        |     46 -
 bigtop-deploy/puppet/modules/sqoop2/tests/init.pp  |     17 -
 bigtop-deploy/puppet/modules/tez/manifests/init.pp |     34 -
 .../puppet/modules/tez/templates/environment       |     17 -
 bigtop-deploy/puppet/modules/tez/tests/init.pp     |     17 -
 .../puppet/modules/ycsb/manifests/init.pp          |     29 -
 bigtop-deploy/puppet/modules/ycsb/tests/init.pp    |     17 -
 .../puppet/modules/zeppelin/manifests/init.pp      |     59 -
 .../modules/zeppelin/templates/interpreter.json    |    151 -
 .../modules/zeppelin/templates/zeppelin-env.sh     |     25 -
 .../puppet/modules/zeppelin/tests/init.pp          |     19 -
 bigtop-packages/src/charm/README.md                |     77 -
 .../src/charm/giraph/layer-giraph/README.md        |     91 -
 .../src/charm/giraph/layer-giraph/actions.yaml     |      2 -
 .../charm/giraph/layer-giraph/actions/smoke-test   |     71 -
 .../src/charm/giraph/layer-giraph/copyright        |     16 -
 .../src/charm/giraph/layer-giraph/icon.svg         |    197 -
 .../src/charm/giraph/layer-giraph/layer.yaml       |      7 -
 .../src/charm/giraph/layer-giraph/metadata.yaml    |     16 -
 .../charm/giraph/layer-giraph/reactive/giraph.py   |    115 -
 .../giraph/layer-giraph/resources/tiny_graph.txt   |      5 -
 .../layer-giraph/tests/01-basic-deployment.py      |     35 -
 .../giraph/layer-giraph/tests/01-giraph-test.py    |     61 -
 .../src/charm/giraph/layer-giraph/tests/tests.yaml |      3 -
 .../charm/hadoop/layer-hadoop-namenode/.gitignore  |      4 -
 .../charm/hadoop/layer-hadoop-namenode/README.md   |    129 -
 .../hadoop/layer-hadoop-namenode/actions.yaml      |      2 -
 .../layer-hadoop-namenode/actions/smoke-test       |     62 -
 .../charm/hadoop/layer-hadoop-namenode/copyright   |     16 -
 .../charm/hadoop/layer-hadoop-namenode/layer.yaml  |     27 -
 .../hadoop/layer-hadoop-namenode/metadata.yaml     |     18 -
 .../hadoop/layer-hadoop-namenode/metrics.yaml      |     13 -
 .../layer-hadoop-namenode/reactive/namenode.py     |    219 -
 .../tests/01-basic-deployment.py                   |     39 -
 .../hadoop/layer-hadoop-namenode/tests/tests.yaml  |      3 -
 .../hadoop/layer-hadoop-namenode/wheelhouse.txt    |      1 -
 .../src/charm/hadoop/layer-hadoop-plugin/README.md |    119 -
 .../charm/hadoop/layer-hadoop-plugin/actions.yaml  |      2 -
 .../hadoop/layer-hadoop-plugin/actions/smoke-test  |     62 -
 .../src/charm/hadoop/layer-hadoop-plugin/copyright |     16 -
 .../charm/hadoop/layer-hadoop-plugin/layer.yaml    |     12 -
 .../charm/hadoop/layer-hadoop-plugin/metadata.yaml |     20 -
 .../reactive/apache_bigtop_plugin.py               |    149 -
 .../tests/01-basic-deployment.py                   |     46 -
 .../hadoop/layer-hadoop-plugin/tests/tests.yaml    |      3 -
 .../hadoop/layer-hadoop-resourcemanager/.gitignore |      4 -
 .../hadoop/layer-hadoop-resourcemanager/README.md  |    180 -
 .../layer-hadoop-resourcemanager/actions.yaml      |    129 -
 .../layer-hadoop-resourcemanager/actions/mrbench   |     77 -
 .../layer-hadoop-resourcemanager/actions/nnbench   |     81 -
 .../actions/parseBenchmark.py                      |     44 -
 .../actions/smoke-test                             |     48 -
 .../layer-hadoop-resourcemanager/actions/teragen   |     63 -
 .../layer-hadoop-resourcemanager/actions/terasort  |     84 -
 .../layer-hadoop-resourcemanager/actions/testdfsio |     82 -
 .../hadoop/layer-hadoop-resourcemanager/copyright  |     16 -
 .../hadoop/layer-hadoop-resourcemanager/layer.yaml |     32 -
 .../layer-hadoop-resourcemanager/metadata.yaml     |     20 -
 .../layer-hadoop-resourcemanager/metrics.yaml      |      5 -
 .../reactive/resourcemanager.py                    |    276 -
 .../tests/01-basic-deployment.py                   |     39 -
 .../layer-hadoop-resourcemanager/tests/tests.yaml  |      3 -
 .../layer-hadoop-resourcemanager/wheelhouse.txt    |      1 -
 .../src/charm/hadoop/layer-hadoop-slave/README.md  |    125 -
 .../charm/hadoop/layer-hadoop-slave/actions.yaml   |      3 -
 .../hadoop/layer-hadoop-slave/actions/smoke-test   |     48 -
 .../src/charm/hadoop/layer-hadoop-slave/copyright  |     16 -
 .../src/charm/hadoop/layer-hadoop-slave/layer.yaml |      4 -
 .../charm/hadoop/layer-hadoop-slave/metadata.yaml  |      9 -
 .../layer-hadoop-slave/reactive/hadoop_status.py   |     54 -
 .../tests/01-basic-deployment.py                   |     39 -
 .../hadoop/layer-hadoop-slave/tests/tests.yaml     |      3 -
 .../src/charm/hbase/layer-hbase/README.md          |    235 -
 .../src/charm/hbase/layer-hbase/actions.yaml       |     23 -
 .../src/charm/hbase/layer-hbase/actions/perf-test  |     46 -
 .../src/charm/hbase/layer-hbase/actions/restart    |     34 -
 .../src/charm/hbase/layer-hbase/actions/smoke-test |     49 -
 .../src/charm/hbase/layer-hbase/actions/start      |     35 -
 .../hbase/layer-hbase/actions/start-hbase-master   |     35 -
 .../layer-hbase/actions/start-hbase-regionserver   |     37 -
 .../src/charm/hbase/layer-hbase/actions/stop       |     35 -
 .../hbase/layer-hbase/actions/stop-hbase-master    |     35 -
 .../layer-hbase/actions/stop-hbase-regionserver    |     37 -
 .../src/charm/hbase/layer-hbase/config.yaml        |      6 -
 .../src/charm/hbase/layer-hbase/copyright          |     16 -
 .../src/charm/hbase/layer-hbase/icon.svg           |     23 -
 .../src/charm/hbase/layer-hbase/layer.yaml         |     35 -
 .../layer-hbase/lib/charms/layer/bigtop_hbase.py   |    110 -
 .../src/charm/hbase/layer-hbase/metadata.yaml      |     18 -
 .../src/charm/hbase/layer-hbase/reactive/hbase.py  |    220 -
 .../hbase/layer-hbase/tests/01-basic-deployment.py |     38 -
 .../charm/hbase/layer-hbase/tests/02-smoke-test.py |     57 -
 .../src/charm/hbase/layer-hbase/tests/tests.yaml   |      3 -
 .../src/charm/hbase/layer-hbase/wheelhouse.txt     |      1 -
 .../src/charm/hive/layer-hive/README.md            |    237 -
 .../src/charm/hive/layer-hive/actions.yaml         |      4 -
 .../src/charm/hive/layer-hive/actions/restart      |     35 -
 .../src/charm/hive/layer-hive/actions/smoke-test   |     48 -
 .../src/charm/hive/layer-hive/config.yaml          |      6 -
 .../src/charm/hive/layer-hive/copyright            |     16 -
 bigtop-packages/src/charm/hive/layer-hive/icon.svg |     75 -
 .../src/charm/hive/layer-hive/layer.yaml           |     32 -
 .../layer-hive/lib/charms/layer/bigtop_hive.py     |    171 -
 .../src/charm/hive/layer-hive/metadata.yaml        |     21 -
 .../src/charm/hive/layer-hive/reactive/hive.py     |    194 -
 .../hive/layer-hive/tests/01-basic-deployment.py   |     39 -
 .../charm/hive/layer-hive/tests/02-smoke-test.py   |     60 -
 .../src/charm/hive/layer-hive/tests/tests.yaml     |      3 -
 .../src/charm/kafka/layer-kafka/README.md          |    221 -
 .../src/charm/kafka/layer-kafka/actions.yaml       |     44 -
 .../charm/kafka/layer-kafka/actions/create-topic   |     51 -
 .../charm/kafka/layer-kafka/actions/kafkautils.py  |     38 -
 .../charm/kafka/layer-kafka/actions/list-topics    |     43 -
 .../src/charm/kafka/layer-kafka/actions/list-zks   |     37 -
 .../src/charm/kafka/layer-kafka/actions/read-topic |     53 -
 .../src/charm/kafka/layer-kafka/actions/smoke-test |     65 -
 .../charm/kafka/layer-kafka/actions/write-topic    |     52 -
 .../src/charm/kafka/layer-kafka/config.yaml        |     10 -
 .../src/charm/kafka/layer-kafka/copyright          |     16 -
 .../src/charm/kafka/layer-kafka/icon.svg           |     21 -
 .../src/charm/kafka/layer-kafka/layer.yaml         |     20 -
 .../layer-kafka/lib/charms/layer/bigtop_kafka.py   |     96 -
 .../src/charm/kafka/layer-kafka/metadata.yaml      |     23 -
 .../src/charm/kafka/layer-kafka/reactive/kafka.py  |    136 -
 .../src/charm/kafka/layer-kafka/tests/01-deploy.py |     47 -
 .../charm/kafka/layer-kafka/tests/02-smoke-test.py |     50 -
 .../kafka/layer-kafka/tests/10-config-changed.py   |    120 -
 .../src/charm/kafka/layer-kafka/tests/tests.yaml   |      3 -
 .../src/charm/mahout/layer-mahout/README.md        |    117 -
 .../src/charm/mahout/layer-mahout/actions.yaml     |      2 -
 .../charm/mahout/layer-mahout/actions/smoke-test   |     65 -
 .../src/charm/mahout/layer-mahout/copyright        |     16 -
 .../src/charm/mahout/layer-mahout/icon.svg         |    411 -
 .../src/charm/mahout/layer-mahout/layer.yaml       |      6 -
 .../src/charm/mahout/layer-mahout/metadata.yaml    |     15 -
 .../charm/mahout/layer-mahout/reactive/mahout.py   |     40 -
 .../layer-mahout/resources/links-converted.txt     |    426 -
 .../charm/mahout/layer-mahout/resources/users.txt  |    247 -
 .../mahout/layer-mahout/tests/01-mahout-test.py    |     60 -
 .../src/charm/mahout/layer-mahout/tests/tests.yaml |      3 -
 .../src/charm/spark/layer-spark/README.md          |    353 -
 .../src/charm/spark/layer-spark/actions.yaml       |     85 -
 .../spark/layer-spark/actions/connectedcomponent   |      1 -
 .../charm/spark/layer-spark/actions/decisiontree   |      1 -
 .../src/charm/spark/layer-spark/actions/kmeans     |      1 -
 .../spark/layer-spark/actions/linearregression     |      1 -
 .../src/charm/spark/layer-spark/actions/list-jobs  |     30 -
 .../spark/layer-spark/actions/logisticregression   |      1 -
 .../spark/layer-spark/actions/matrixfactorization  |      1 -
 .../src/charm/spark/layer-spark/actions/pagerank   |    139 -
 .../src/charm/spark/layer-spark/actions/pca        |      1 -
 .../spark/layer-spark/actions/pregeloperation      |      1 -
 .../src/charm/spark/layer-spark/actions/reinstall  |     59 -
 .../src/charm/spark/layer-spark/actions/remove-job |     24 -
 .../actions/restart-spark-job-history-server       |     37 -
 .../charm/spark/layer-spark/actions/shortestpaths  |      1 -
 .../src/charm/spark/layer-spark/actions/smoke-test |      1 -
 .../charm/spark/layer-spark/actions/spark-submit   |     45 -
 .../src/charm/spark/layer-spark/actions/sparkbench |    153 -
 .../src/charm/spark/layer-spark/actions/sparkpi    |    119 -
 .../src/charm/spark/layer-spark/actions/sql        |      1 -
 .../actions/start-spark-job-history-server         |     37 -
 .../actions/stop-spark-job-history-server          |     37 -
 .../layer-spark/actions/stronglyconnectedcomponent |      1 -
 .../src/charm/spark/layer-spark/actions/submit     |      1 -
 .../charm/spark/layer-spark/actions/svdplusplus    |      1 -
 .../src/charm/spark/layer-spark/actions/svm        |      1 -
 .../src/charm/spark/layer-spark/config.yaml        |     39 -
 .../src/charm/spark/layer-spark/copyright          |     16 -
 .../src/charm/spark/layer-spark/icon.svg           |     13 -
 .../src/charm/spark/layer-spark/layer.yaml         |     38 -
 .../layer-spark/lib/charms/layer/bigtop_spark.py   |    415 -
 .../src/charm/spark/layer-spark/metadata.yaml      |     24 -
 .../src/charm/spark/layer-spark/reactive/spark.py  |    320 -
 .../src/charm/spark/layer-spark/scripts/sparkpi.sh |     23 -
 .../spark/layer-spark/tests/01-basic-deployment.py |     42 -
 .../charm/spark/layer-spark/tests/02-smoke-test.py |     48 -
 .../spark/layer-spark/tests/03-scale-standalone.py |     74 -
 .../spark/layer-spark/tests/04-test-config.py      |     48 -
 .../charm/spark/layer-spark/tests/10-test-ha.py    |     92 -
 .../src/charm/spark/layer-spark/tests/tests.yaml   |      3 -
 .../src/charm/spark/layer-spark/wheelhouse.txt     |      1 -
 .../src/charm/zeppelin/layer-zeppelin/README.md    |    165 -
 .../src/charm/zeppelin/layer-zeppelin/actions.yaml |     10 -
 .../zeppelin/layer-zeppelin/actions/reinstall      |     58 -
 .../charm/zeppelin/layer-zeppelin/actions/restart  |     36 -
 .../zeppelin/layer-zeppelin/actions/smoke-test     |     85 -
 .../src/charm/zeppelin/layer-zeppelin/copyright    |     16 -
 .../src/charm/zeppelin/layer-zeppelin/icon.svg     |     20 -
 .../src/charm/zeppelin/layer-zeppelin/layer.yaml   |     34 -
 .../lib/charms/layer/bigtop_zeppelin.py            |    304 -
 .../charm/zeppelin/layer-zeppelin/metadata.yaml    |     19 -
 .../zeppelin/layer-zeppelin/reactive/zeppelin.py   |    216 -
 .../resources/flume-tutorial/note.json             |      1 -
 .../resources/hdfs-tutorial/note.json              |    340 -
 .../layer-zeppelin/tests/01-basic-deployment.py    |     43 -
 .../layer-zeppelin/tests/02-zeppelin-smoke.py      |     48 -
 .../tests/03-zeppelin-spark-smoke.py               |     63 -
 .../layer-zeppelin/tests/04-zeppelin-config.py     |     48 -
 .../charm/zeppelin/layer-zeppelin/tests/tests.yaml |      3 -
 .../charm/zeppelin/layer-zeppelin/wheelhouse.txt   |      1 -
 .../src/charm/zookeeper/layer-zookeeper/README.md  |    186 -
 .../charm/zookeeper/layer-zookeeper/actions.yaml   |      4 -
 .../zookeeper/layer-zookeeper/actions/restart      |     39 -
 .../zookeeper/layer-zookeeper/actions/smoke-test   |     42 -
 .../charm/zookeeper/layer-zookeeper/config.yaml    |    111 -
 .../src/charm/zookeeper/layer-zookeeper/copyright  |     16 -
 .../layer-zookeeper/files/check_zookeeper.py       |    356 -
 .../src/charm/zookeeper/layer-zookeeper/icon.svg   |     38 -
 .../src/charm/zookeeper/layer-zookeeper/layer.yaml |     17 -
 .../lib/charms/layer/bigtop_zookeeper.py           |    197 -
 .../charm/zookeeper/layer-zookeeper/metadata.yaml  |     25 -
 .../charm/zookeeper/layer-zookeeper/metrics.yaml   |      5 -
 .../layer-zookeeper/reactive/zookeeper.py          |    367 -
 .../layer-zookeeper/tests/01-deploy-smoke.py       |     69 -
 .../layer-zookeeper/tests/10-bind-address.py       |    122 -
 .../layer-zookeeper/tests/20-snapshots.py          |     89 -
 .../zookeeper/layer-zookeeper/tests/tests.yaml     |      3 -
 .../src/common/alluxio/alluxio-master.svc          |     75 -
 .../src/common/alluxio/alluxio-worker.svc          |     79 -
 .../src/common/alluxio/do-component-build          |     25 -
 bigtop-packages/src/common/alluxio/init.d.tmpl     |    315 -
 .../src/common/alluxio/install_alluxio.sh          |    163 -
 .../common/alluxio/patch0-fix-license-check.diff   |     12 -
 .../alluxio/patch1-fix-hadoop-version-check.diff   |     13 -
 .../src/common/ambari/ambari-server.svc            |     14 -
 bigtop-packages/src/common/ambari/ambari.defaults  |     14 -
 .../src/common/ambari/do-component-build           |     28 -
 .../src/common/ambari/install_ambari.sh            |    158 -
 .../src/common/ambari/patch0-AMBARI.diff           |     13 -
 .../src/common/ambari/patch1-AMBARI-storm.diff     |     12 -
 .../src/common/ambari/patch2-AMBARI-phantomjs.diff |     36 -
 .../src/common/ambari/patch3-AMBARI-25499.diff     |     13 -
 .../src/common/ambari/patch4-AMBARI-CentOS8.diff   |     68 -
 .../patch5-AMBARI-spring-boot-maven-plugin.diff    |     14 -
 .../src/common/bigtop-groovy/do-component-build    |     19 -
 .../src/common/bigtop-groovy/install_groovy.sh     |    106 -
 .../src/common/bigtop-jsvc/do-component-build      |     27 -
 .../src/common/bigtop-jsvc/install_jsvc.sh         |     89 -
 ...h-9e679f88e690f65a81e4590e172061f0f6be5a4d.diff |     80 -
 .../src/common/bigtop-jsvc/patch2-DAEMON-349.diff  |   3352 -
 .../src/common/bigtop-tomcat/do-component-build    |     26 -
 .../src/common/bigtop-tomcat/install_tomcat.sh     |     90 -
 .../common/bigtop-utils/bigtop-detect-classpath    |     34 -
 .../src/common/bigtop-utils/bigtop-detect-javahome |     75 -
 .../src/common/bigtop-utils/bigtop-detect-javalibs |     51 -
 .../src/common/bigtop-utils/bigtop-monitor-service |     93 -
 .../src/common/bigtop-utils/bigtop-utils.default   |     23 -
 .../src/common/elasticsearch/do-component-build    |     23 -
 .../src/common/elasticsearch/elasticsearch.default |     65 -
 .../src/common/elasticsearch/elasticsearch.init    |    220 -
 .../common/elasticsearch/install_elasticsearch.sh  |    148 -
 .../src/common/flink/do-component-build            |     26 -
 .../src/common/flink/flink-jobmanager.svc          |     82 -
 .../src/common/flink/flink-taskmanager.svc         |     82 -
 bigtop-packages/src/common/flink/install_flink.sh  |    141 -
 .../flink/patch0-fix-ApplicationReport-api.diff    |     12 -
 .../src/common/flume/do-component-build            |     36 -
 .../src/common/flume/flume-agent.default           |     38 -
 bigtop-packages/src/common/flume/flume-agent.init  |    266 -
 bigtop-packages/src/common/flume/install_flume.sh  |    175 -
 .../src/common/flume/patch0-FLUME-2662.diff        |     13 -
 .../src/common/flume/patch3-FLUME-3354.diff        |     54 -
 .../src/common/flume/patch4-FLUME-3355.diff        |     61 -
 .../src/common/giraph/do-component-build           |     25 -
 .../src/common/giraph/install_giraph.sh            |    158 -
 .../src/common/giraph/patch0-GIRAPH-1110.diff      |     20 -
 .../src/common/giraph/patch1-remove-findbugs.diff  |    374 -
 .../src/common/giraph/patch2-fix-pom.diff          |     21 -
 bigtop-packages/src/common/gpdb/do-component-build |     21 -
 .../src/common/gpdb/do-component-configure         |     19 -
 bigtop-packages/src/common/gpdb/install_gpdb.sh    |     19 -
 .../src/common/gpdb/patch0-DISTDIR.diff            |     27 -
 .../common/gpdb/patch1-specify-python-version.diff |   2793 -
 ...fix-deprecated-first-element-index-in-perl.diff |    106 -
 .../src/common/hadoop/conf.empty/core-site.xml     |     21 -
 .../src/common/hadoop/conf.empty/hdfs-site.xml     |     25 -
 .../src/common/hadoop/conf.empty/mapred-site.xml   |     21 -
 .../src/common/hadoop/conf.empty/yarn-site.xml     |     65 -
 .../src/common/hadoop/conf.pseudo/README           |     22 -
 .../src/common/hadoop/conf.pseudo/core-site.xml    |     46 -
 .../hadoop/conf.pseudo/hadoop-metrics.properties   |     80 -
 .../src/common/hadoop/conf.pseudo/hdfs-site.xml    |     51 -
 .../src/common/hadoop/conf.pseudo/mapred-site.xml  |     46 -
 .../src/common/hadoop/conf.pseudo/yarn-site.xml    |     70 -
 .../common/hadoop/conf.secure/configuration.xsl    |     40 -
 .../src/common/hadoop/conf.secure/core-site.xml    |    116 -
 .../src/common/hadoop/conf.secure/hadoop-env.sh    |     56 -
 .../hadoop/conf.secure/hadoop-metrics.properties   |     59 -
 .../common/hadoop/conf.secure/hadoop-policy.xml    |    122 -
 .../src/common/hadoop/conf.secure/hdfs-site.xml    |    242 -
 .../src/common/hadoop/conf.secure/log4j.properties |    126 -
 .../hadoop/conf.secure/mapred-queue-acls.xml       |     63 -
 .../src/common/hadoop/conf.secure/mapred-site.xml  |    259 -
 .../src/common/hadoop/conf.secure/masters          |     16 -
 .../src/common/hadoop/conf.secure/slaves           |     16 -
 .../common/hadoop/conf.secure/taskcontroller.cfg   |     18 -
 .../src/common/hadoop/do-component-build           |    148 -
 .../src/common/hadoop/hadoop-fuse-dfs.1            |    107 -
 .../src/common/hadoop/hadoop-fuse.default          |     16 -
 .../src/common/hadoop/hadoop-hdfs-datanode.svc     |     71 -
 .../src/common/hadoop/hadoop-hdfs-journalnode.svc  |     23 -
 .../src/common/hadoop/hadoop-hdfs-namenode.svc     |     51 -
 .../hadoop/hadoop-hdfs-secondarynamenode.svc       |     23 -
 .../src/common/hadoop/hadoop-hdfs-zkfc.svc         |     23 -
 .../src/common/hadoop/hadoop-httpfs.svc            |     94 -
 bigtop-packages/src/common/hadoop/hadoop-kms.svc   |     94 -
 bigtop-packages/src/common/hadoop/hadoop-layout.sh |     30 -
 .../hadoop/hadoop-mapreduce-historyserver.svc      |     24 -
 .../src/common/hadoop/hadoop-yarn-nodemanager.svc  |     24 -
 .../src/common/hadoop/hadoop-yarn-proxyserver.svc  |     24 -
 .../common/hadoop/hadoop-yarn-resourcemanager.svc  |     24 -
 .../common/hadoop/hadoop-yarn-timelineserver.svc   |     24 -
 bigtop-packages/src/common/hadoop/hadoop.1         |    332 -
 bigtop-packages/src/common/hadoop/hadoop.default   |     24 -
 bigtop-packages/src/common/hadoop/hdfs.1           |     16 -
 bigtop-packages/src/common/hadoop/hdfs.conf        |     17 -
 bigtop-packages/src/common/hadoop/hdfs.default     |     24 -
 .../src/common/hadoop/httpfs-tomcat-deployment.sh  |     38 -
 bigtop-packages/src/common/hadoop/httpfs.default   |     24 -
 bigtop-packages/src/common/hadoop/init-hcfs.groovy |    343 -
 bigtop-packages/src/common/hadoop/init-hcfs.json   |    104 -
 bigtop-packages/src/common/hadoop/init-hdfs.sh     |     35 -
 .../src/common/hadoop/install_hadoop.sh            |    439 -
 .../src/common/hadoop/kms-tomcat-deployment.sh     |     37 -
 bigtop-packages/src/common/hadoop/kms.default      |     24 -
 bigtop-packages/src/common/hadoop/mapred.1         |     16 -
 bigtop-packages/src/common/hadoop/mapreduce.conf   |     17 -
 .../src/common/hadoop/mapreduce.default            |     18 -
 .../src/common/hadoop/patch1-YARN-9945.diff        |     18 -
 .../src/common/hadoop/patch2-HADOOP-14597.diff     |    143 -
 .../src/common/hadoop/patch3-HADOOP-15062.diff     |    105 -
 .../src/common/hadoop/patch3-HADOOP-16739.diff     |     40 -
 .../src/common/hadoop/patch4-HADOOP-16647.diff     |     81 -
 bigtop-packages/src/common/hadoop/yarn.1           |     16 -
 bigtop-packages/src/common/hadoop/yarn.conf        |     17 -
 bigtop-packages/src/common/hadoop/yarn.default     |     18 -
 .../src/common/hbase/do-component-build            |     55 -
 bigtop-packages/src/common/hbase/hbase.1           |     88 -
 bigtop-packages/src/common/hbase/hbase.default     |     41 -
 .../src/common/hbase/hbase.nofiles.conf            |     16 -
 bigtop-packages/src/common/hbase/hbase.svc         |     92 -
 bigtop-packages/src/common/hbase/install_hbase.sh  |    168 -
 .../src/common/hbase/patch0-jline-jrub.diff        |     15 -
 .../src/common/hbase/patch1-HBASE-19663.diff       |     29 -
 .../src/common/hbase/regionserver-init.d.tpl       |    449 -
 bigtop-packages/src/common/hive/do-component-build |     39 -
 .../src/common/hive/hive-hcatalog-server.default   |     25 -
 .../src/common/hive/hive-hcatalog-server.svc       |     23 -
 bigtop-packages/src/common/hive/hive-hcatalog.1    |     71 -
 .../src/common/hive/hive-metastore.default         |     20 -
 bigtop-packages/src/common/hive/hive-metastore.svc |     75 -
 .../src/common/hive/hive-server2.default           |     20 -
 bigtop-packages/src/common/hive/hive-server2.svc   |     82 -
 bigtop-packages/src/common/hive/hive-site.xml      |     42 -
 .../src/common/hive/hive-webhcat-server.default    |     22 -
 .../src/common/hive/hive-webhcat-server.svc        |     24 -
 bigtop-packages/src/common/hive/hive.1             |    103 -
 bigtop-packages/src/common/hive/install_hive.sh    |    264 -
 .../src/common/hive/patch0-HIVE-16302.diff         |     16 -
 .../src/common/hive/patch1-HIVE-22839.diff         |     28 -
 .../src/common/hive/patch2-HIVE-18436.diff         |     73 -
 .../src/common/hive/patch3-HIVE-23303.diff         |     37 -
 .../hive/patch4-HIVE-16402-hadoop-2.10.0.diff      |    251 -
 .../src/common/ignite-hadoop/do-component-build    |     44 -
 .../src/common/ignite-hadoop/ignite-hadoop.1       |     59 -
 .../src/common/ignite-hadoop/ignite-hadoop.default |     24 -
 .../src/common/ignite-hadoop/ignite-hadoop.svc     |     63 -
 .../src/common/ignite-hadoop/install_ignite.sh     |    149 -
 .../common/ignite-hadoop/patch0-shmem-config.diff  |     14 -
 .../src/common/kafka/do-component-build            |     38 -
 bigtop-packages/src/common/kafka/install_kafka.sh  |    171 -
 bigtop-packages/src/common/kafka/kafka-server.svc  |     95 -
 bigtop-packages/src/common/kafka/kafka.default     |      0
 .../src/common/kibana/do-component-build           |     30 -
 .../src/common/kibana/install_kibana.sh            |    125 -
 .../src/common/kibana/patch0-kibana-arm64.diff     |     36 -
 .../src/common/kibana/patch1-kibana-git-init.diff  |     23 -
 bigtop-packages/src/common/livy/do-component-build |     34 -
 bigtop-packages/src/common/livy/install_livy.sh    |    109 -
 bigtop-packages/src/common/livy/livy-server.svc    |     79 -
 .../livy/patch0-python-executable-version.diff     |     22 -
 .../src/common/logstash/do-component-build         |     56 -
 .../src/common/logstash/install_logstash.sh        |    135 -
 .../src/common/logstash/log4j2.properties          |     31 -
 .../src/common/logstash/logstash.default           |     18 -
 .../common/logstash/patch0-logstash-for-arm64.diff |   2393 -
 .../src/common/mahout/do-component-build           |     32 -
 .../src/common/mahout/install_mahout.sh            |    146 -
 .../src/common/oozie/catalina.properties           |     81 -
 bigtop-packages/src/common/oozie/context.xml       |     35 -
 .../src/common/oozie/do-component-build            |    169 -
 bigtop-packages/src/common/oozie/hive.xml          |     30 -
 bigtop-packages/src/common/oozie/install_oozie.sh  |    239 -
 bigtop-packages/src/common/oozie/oozie-env.sh      |     77 -
 bigtop-packages/src/common/oozie/oozie-site.xml    |     92 -
 bigtop-packages/src/common/oozie/oozie.1           |     45 -
 bigtop-packages/src/common/oozie/oozie.init        |    148 -
 ...204-remove-h2-pig-classifier-in-Oozie-code.diff |     27 -
 .../common/oozie/patch1-BIGTOP-3099-log4j2.diff    |    287 -
 .../common/oozie/patch2-server-xml-tomcat85.diff   |     17 -
 .../common/oozie/patch3-OOZIE-3502-branch-4.3.diff |     34 -
 .../src/common/oozie/tomcat-deployment.sh          |     37 -
 .../src/common/phoenix/do-component-build          |     39 -
 .../src/common/phoenix/install_phoenix.sh          |    144 -
 .../src/common/phoenix/patch0-PHOENIX-5711.diff    |     22 -
 .../src/common/phoenix/phoenix-queryserver.default |     20 -
 .../src/common/phoenix/phoenix-queryserver.svc     |     79 -
 bigtop-packages/src/common/phoenix/phoenix.default |     20 -
 bigtop-packages/src/common/qfs/do-component-build  |    129 -
 bigtop-packages/src/common/qfs/install_qfs.sh      |    238 -
 bigtop-packages/src/common/qfs/patch0-gf_cpu.diff  |     16 -
 .../src/common/qfs/patch1-maven-surefire.diff      |     29 -
 .../common/qfs/patch2-specify-python-version.diff  |     20 -
 bigtop-packages/src/common/qfs/qfs-chunkserver.cfg |     24 -
 bigtop-packages/src/common/qfs/qfs-chunkserver.svc |     94 -
 bigtop-packages/src/common/qfs/qfs-metaserver.cfg  |     24 -
 bigtop-packages/src/common/qfs/qfs-metaserver.svc  |     94 -
 bigtop-packages/src/common/qfs/qfs-webui.cfg       |     24 -
 bigtop-packages/src/common/qfs/qfs-webui.svc       |     94 -
 bigtop-packages/src/common/solr/do-component-build |     56 -
 bigtop-packages/src/common/solr/install_solr.sh    |    154 -
 bigtop-packages/src/common/solr/logging.properties |     64 -
 bigtop-packages/src/common/solr/schema.xml         |   1118 -
 .../src/common/solr/solr-server.init.debian        |    187 -
 bigtop-packages/src/common/solr/solr.default       |    141 -
 bigtop-packages/src/common/solr/solrconfig.xml     |   1817 -
 bigtop-packages/src/common/solr/solrctl.sh         |    408 -
 bigtop-packages/src/common/solr/solrd              |    141 -
 bigtop-packages/src/common/spark/LICENSE-binary    |    517 -
 .../src/common/spark/do-component-build            |     43 -
 bigtop-packages/src/common/spark/install_spark.sh  |    208 -
 .../spark/licenses-binary/LICENSE-AnchorJS.txt     |     21 -
 .../common/spark/licenses-binary/LICENSE-CC0.txt   |    121 -
 .../common/spark/licenses-binary/LICENSE-antlr.txt |      8 -
 .../spark/licenses-binary/LICENSE-arpack.txt       |      8 -
 .../spark/licenses-binary/LICENSE-automaton.txt    |     24 -
 .../spark/licenses-binary/LICENSE-bootstrap.txt    |     13 -
 .../spark/licenses-binary/LICENSE-cloudpickle.txt  |     28 -
 .../spark/licenses-binary/LICENSE-d3.min.js.txt    |     26 -
 .../spark/licenses-binary/LICENSE-dagre-d3.txt     |     19 -
 .../spark/licenses-binary/LICENSE-datatables.txt   |      7 -
 .../common/spark/licenses-binary/LICENSE-f2j.txt   |      8 -
 .../spark/licenses-binary/LICENSE-graphlib-dot.txt |     19 -
 .../common/spark/licenses-binary/LICENSE-heapq.txt |    280 -
 .../spark/licenses-binary/LICENSE-janino.txt       |     31 -
 .../spark/licenses-binary/LICENSE-javassist.html   |    373 -
 .../spark/licenses-binary/LICENSE-javolution.txt   |     27 -
 .../common/spark/licenses-binary/LICENSE-jline.txt |     32 -
 .../common/spark/licenses-binary/LICENSE-jodd.txt  |     24 -
 .../common/spark/licenses-binary/LICENSE-join.txt  |     30 -
 .../spark/licenses-binary/LICENSE-jquery.txt       |     20 -
 .../licenses-binary/LICENSE-json-formatter.txt     |      6 -
 .../spark/licenses-binary/LICENSE-jtransforms.html |    388 -
 .../common/spark/licenses-binary/LICENSE-kryo.txt  |     10 -
 .../spark/licenses-binary/LICENSE-leveldbjni.txt   |     27 -
 .../spark/licenses-binary/LICENSE-machinist.txt    |     19 -
 .../LICENSE-matchMedia-polyfill.txt                |      1 -
 .../spark/licenses-binary/LICENSE-minlog.txt       |     10 -
 .../spark/licenses-binary/LICENSE-modernizr.txt    |     21 -
 .../spark/licenses-binary/LICENSE-mustache.txt     |     11 -
 .../spark/licenses-binary/LICENSE-netlib.txt       |     49 -
 .../spark/licenses-binary/LICENSE-paranamer.txt    |     28 -
 .../spark/licenses-binary/LICENSE-pmml-model.txt   |     10 -
 .../spark/licenses-binary/LICENSE-protobuf.txt     |     42 -
 .../common/spark/licenses-binary/LICENSE-py4j.txt  |     27 -
 .../spark/licenses-binary/LICENSE-pyrolite.txt     |     28 -
 .../spark/licenses-binary/LICENSE-reflectasm.txt   |     10 -
 .../spark/licenses-binary/LICENSE-respond.txt      |     22 -
 .../licenses-binary/LICENSE-sbt-launch-lib.txt     |     26 -
 .../common/spark/licenses-binary/LICENSE-scala.txt |     30 -
 .../common/spark/licenses-binary/LICENSE-scopt.txt |      9 -
 .../common/spark/licenses-binary/LICENSE-slf4j.txt |     21 -
 .../spark/licenses-binary/LICENSE-sorttable.js.txt |     16 -
 .../common/spark/licenses-binary/LICENSE-spire.txt |     19 -
 .../common/spark/licenses-binary/LICENSE-vis.txt   |     22 -
 .../spark/licenses-binary/LICENSE-xmlenc.txt       |     27 -
 .../spark/licenses-binary/LICENSE-zstd-jni.txt     |     26 -
 .../common/spark/licenses-binary/LICENSE-zstd.txt  |     30 -
 .../src/common/spark/patch0-SPARK-24152.diff       |     13 -
 .../src/common/spark/patch1-SPARK-31918.diff       |    114 -
 bigtop-packages/src/common/spark/spark-env.sh      |     42 -
 .../src/common/spark/spark-history-server.svc      |     70 -
 bigtop-packages/src/common/spark/spark-master.svc  |     81 -
 .../src/common/spark/spark-thriftserver.svc        |     75 -
 bigtop-packages/src/common/spark/spark-worker.svc  |     75 -
 .../src/common/sqoop/do-component-build            |     25 -
 bigtop-packages/src/common/sqoop/install_sqoop.sh  |    172 -
 .../src/common/sqoop/patch0-SQOOP-3462.diff        |     50 -
 .../src/common/sqoop/sqoop-metastore.sh            |    178 -
 .../src/common/sqoop/sqoop-metastore.sh.suse       |    145 -
 .../src/common/sqoop2/catalina.properties          |     81 -
 .../src/common/sqoop2/do-component-build           |     51 -
 .../src/common/sqoop2/install_sqoop2.sh            |    181 -
 .../common/sqoop2/patch0-server-xml-tomcat85.diff  |     17 -
 bigtop-packages/src/common/sqoop2/sqoop-server.sh  |     35 -
 bigtop-packages/src/common/sqoop2/sqoop-server.svc |    113 -
 bigtop-packages/src/common/sqoop2/sqoop-tool.sh    |     36 -
 bigtop-packages/src/common/sqoop2/sqoop.sh         |     27 -
 bigtop-packages/src/common/sqoop2/sqoop2.default   |     27 -
 .../src/common/sqoop2/tomcat-deployment.sh         |     59 -
 bigtop-packages/src/common/tez/do-component-build  |     26 -
 bigtop-packages/src/common/tez/install_tez.sh      |    115 -
 .../src/common/tez/patch0-remove-phantomjs.diff    |     12 -
 bigtop-packages/src/common/tez/tez-site.xml        |     27 -
 bigtop-packages/src/common/tez/tez.1               |     27 -
 bigtop-packages/src/common/ycsb/do-component-build |     27 -
 bigtop-packages/src/common/ycsb/install_ycsb.sh    |     98 -
 .../src/common/zeppelin/do-component-build         |     59 -
 .../src/common/zeppelin/install_zeppelin.sh        |    128 -
 .../zeppelin/patch0-spark-2.4.5-compatibility.diff |     69 -
 .../src/common/zeppelin/patch1-arm64-build.diff    |    310 -
 .../zeppelin/patch2-maven-code-coverage.diff       |    102 -
 .../src/common/zeppelin/zeppelin-env.sh            |     91 -
 bigtop-packages/src/common/zeppelin/zeppelin.svc   |     78 -
 .../src/common/zookeeper/do-component-build        |     31 -
 .../src/common/zookeeper/install_zookeeper.sh      |    221 -
 .../common/zookeeper/patch0-ZOOKEEPER-3079.diff    |     13 -
 .../common/zookeeper/patch1-ZOOKEEPER-3302.diff    |    157 -
 bigtop-packages/src/common/zookeeper/zoo.cfg       |     28 -
 .../src/common/zookeeper/zookeeper-rest.svc        |     68 -
 .../src/common/zookeeper/zookeeper-server.sh       |    182 -
 .../src/common/zookeeper/zookeeper-server.sh.suse  |    142 -
 bigtop-packages/src/common/zookeeper/zookeeper.1   |     80 -
 .../src/common/zookeeper/zookeeper.default         |     17 -
 bigtop-packages/src/deb/alluxio/alluxio.dirs       |      5 -
 bigtop-packages/src/deb/alluxio/alluxio.install    |      3 -
 bigtop-packages/src/deb/alluxio/alluxio.postinst   |     37 -
 bigtop-packages/src/deb/alluxio/alluxio.preinst    |     64 -
 bigtop-packages/src/deb/alluxio/alluxio.prerm      |     40 -
 bigtop-packages/src/deb/alluxio/changelog          |      1 -
 bigtop-packages/src/deb/alluxio/compat             |      1 -
 bigtop-packages/src/deb/alluxio/control            |     37 -
 bigtop-packages/src/deb/alluxio/copyright          |     15 -
 bigtop-packages/src/deb/alluxio/rules              |     43 -
 bigtop-packages/src/deb/alluxio/source/format      |      1 -
 .../src/deb/ambari/ambari-agent.install            |      8 -
 .../src/deb/ambari/ambari-agent.postinst           |     33 -
 bigtop-packages/src/deb/ambari/ambari-agent.postrm |     24 -
 .../src/deb/ambari/ambari-agent.preinst            |     47 -
 bigtop-packages/src/deb/ambari/ambari-agent.prerm  |     35 -
 .../src/deb/ambari/ambari-client.install           |      2 -
 .../src/deb/ambari/ambari-server.install           |     10 -
 .../src/deb/ambari/ambari-server.postinst          |     27 -
 .../src/deb/ambari/ambari-server.postrm            |     15 -
 .../src/deb/ambari/ambari-server.preinst           |     70 -
 bigtop-packages/src/deb/ambari/ambari-server.prerm |     27 -
 bigtop-packages/src/deb/ambari/changelog           |      1 -
 bigtop-packages/src/deb/ambari/compat              |      1 -
 bigtop-packages/src/deb/ambari/control             |     37 -
 bigtop-packages/src/deb/ambari/copyright           |     15 -
 bigtop-packages/src/deb/ambari/rules               |     42 -
 bigtop-packages/src/deb/ambari/source/format       |      1 -
 .../src/deb/bigtop-groovy/bigtop-groovy.install    |      4 -
 bigtop-packages/src/deb/bigtop-groovy/changelog    |      1 -
 bigtop-packages/src/deb/bigtop-groovy/compat       |      1 -
 bigtop-packages/src/deb/bigtop-groovy/control      |     27 -
 bigtop-packages/src/deb/bigtop-groovy/copyright    |     15 -
 bigtop-packages/src/deb/bigtop-groovy/rules        |     39 -
 .../src/deb/bigtop-groovy/source/format            |      1 -
 .../src/deb/bigtop-jsvc/bigtop-jsvc.install        |      2 -
 bigtop-packages/src/deb/bigtop-jsvc/changelog      |      1 -
 bigtop-packages/src/deb/bigtop-jsvc/compat         |      1 -
 bigtop-packages/src/deb/bigtop-jsvc/control        |     27 -
 bigtop-packages/src/deb/bigtop-jsvc/copyright      |     15 -
 bigtop-packages/src/deb/bigtop-jsvc/rules          |     38 -
 bigtop-packages/src/deb/bigtop-jsvc/source/format  |      1 -
 bigtop-packages/src/deb/bigtop-tomcat/changelog    |      1 -
 bigtop-packages/src/deb/bigtop-tomcat/compat       |      1 -
 bigtop-packages/src/deb/bigtop-tomcat/control      |     28 -
 bigtop-packages/src/deb/bigtop-tomcat/copyright    |     15 -
 bigtop-packages/src/deb/bigtop-tomcat/rules        |     36 -
 .../src/deb/bigtop-tomcat/source/format            |      1 -
 bigtop-packages/src/deb/bigtop-utils/compat        |      1 -
 bigtop-packages/src/deb/bigtop-utils/control       |     27 -
 bigtop-packages/src/deb/bigtop-utils/copyright     |     15 -
 bigtop-packages/src/deb/bigtop-utils/rules         |     40 -
 bigtop-packages/src/deb/bigtop-utils/source/format |      1 -
 bigtop-packages/src/deb/elasticsearch/changelog    |      1 -
 bigtop-packages/src/deb/elasticsearch/compat       |      1 -
 bigtop-packages/src/deb/elasticsearch/control      |     30 -
 bigtop-packages/src/deb/elasticsearch/copyright    |     15 -
 .../deb/elasticsearch/elasticsearch-doc.install    |      2 -
 .../src/deb/elasticsearch/elasticsearch.install    |      5 -
 .../src/deb/elasticsearch/elasticsearch.postinst   |     39 -
 .../src/deb/elasticsearch/elasticsearch.postrm     |     57 -
 .../src/deb/elasticsearch/elasticsearch.preinst    |     67 -
 .../src/deb/elasticsearch/elasticsearch.prerm      |     41 -
 bigtop-packages/src/deb/elasticsearch/rules        |     42 -
 .../src/deb/elasticsearch/source/format            |      1 -
 bigtop-packages/src/deb/flink/changelog            |      1 -
 bigtop-packages/src/deb/flink/compat               |      1 -
 bigtop-packages/src/deb/flink/control              |     38 -
 bigtop-packages/src/deb/flink/copyright            |     15 -
 .../src/deb/flink/flink-jobmanager.postinst        |     33 -
 .../src/deb/flink/flink-jobmanager.preinst         |     51 -
 .../src/deb/flink/flink-jobmanager.prerm           |     38 -
 .../src/deb/flink/flink-taskmanager.postinst       |     33 -
 .../src/deb/flink/flink-taskmanager.preinst        |     51 -
 .../src/deb/flink/flink-taskmanager.prerm          |     38 -
 bigtop-packages/src/deb/flink/flink.dirs           |      5 -
 bigtop-packages/src/deb/flink/flink.install        |      7 -
 bigtop-packages/src/deb/flink/flink.postinst       |     37 -
 bigtop-packages/src/deb/flink/flink.preinst        |     62 -
 bigtop-packages/src/deb/flink/flink.prerm          |     39 -
 bigtop-packages/src/deb/flink/rules                |     41 -
 bigtop-packages/src/deb/flink/rules.orig           |     46 -
 bigtop-packages/src/deb/flink/source/format        |      1 -
 bigtop-packages/src/deb/flume/changelog            |      1 -
 bigtop-packages/src/deb/flume/compat               |      1 -
 bigtop-packages/src/deb/flume/control              |     50 -
 bigtop-packages/src/deb/flume/copyright            |     11 -
 bigtop-packages/src/deb/flume/flume-agent.postinst |     26 -
 bigtop-packages/src/deb/flume/flume.postinst       |     38 -
 bigtop-packages/src/deb/flume/flume.preinst        |     65 -
 bigtop-packages/src/deb/flume/flume.prerm          |     57 -
 bigtop-packages/src/deb/flume/rules                |     47 -
 bigtop-packages/src/deb/flume/source/format        |      1 -
 bigtop-packages/src/deb/giraph/changelog           |      1 -
 bigtop-packages/src/deb/giraph/compat              |      1 -
 bigtop-packages/src/deb/giraph/control             |     39 -
 bigtop-packages/src/deb/giraph/copyright           |     15 -
 bigtop-packages/src/deb/giraph/giraph-doc.install  |      1 -
 bigtop-packages/src/deb/giraph/giraph.install      |      3 -
 bigtop-packages/src/deb/giraph/giraph.postinst     |     34 -
 bigtop-packages/src/deb/giraph/giraph.prerm        |     38 -
 bigtop-packages/src/deb/giraph/rules               |     38 -
 bigtop-packages/src/deb/giraph/source/format       |      1 -
 bigtop-packages/src/deb/gpdb/changelog             |      1 -
 bigtop-packages/src/deb/gpdb/compat                |      1 -
 bigtop-packages/src/deb/gpdb/control               |     27 -
 bigtop-packages/src/deb/gpdb/copyright             |     15 -
 bigtop-packages/src/deb/gpdb/rules                 |     46 -
 bigtop-packages/src/deb/gpdb/source/format         |      1 -
 bigtop-packages/src/deb/hadoop/append_licenses.sh  |     45 -
 bigtop-packages/src/deb/hadoop/changelog           |      1 -
 bigtop-packages/src/deb/hadoop/compat              |      1 -
 bigtop-packages/src/deb/hadoop/control             |    214 -
 bigtop-packages/src/deb/hadoop/copyright           |     15 -
 bigtop-packages/src/deb/hadoop/dirs                |      1 -
 .../src/deb/hadoop/hadoop-client.install           |      1 -
 .../src/deb/hadoop/hadoop-conf-pseudo.install      |      1 -
 .../hadoop/hadoop-conf-pseudo.lintian-overrides    |      1 -
 .../src/deb/hadoop/hadoop-conf-pseudo.postinst     |     58 -
 .../src/deb/hadoop/hadoop-conf-pseudo.prerm        |     57 -
 bigtop-packages/src/deb/hadoop/hadoop-doc.dirs     |      1 -
 bigtop-packages/src/deb/hadoop/hadoop-doc.install  |      1 -
 .../src/deb/hadoop/hadoop-doc.lintian-overrides    |      3 -
 .../src/deb/hadoop/hadoop-hdfs-fuse.dirs           |      0
 .../src/deb/hadoop/hadoop-hdfs-fuse.install        |      3 -
 .../deb/hadoop/hadoop-hdfs-fuse.lintian-overrides  |      1 -
 .../src/deb/hadoop/hadoop-hdfs-fuse.manpages       |      1 -
 bigtop-packages/src/deb/hadoop/hadoop-hdfs.dirs    |      6 -
 bigtop-packages/src/deb/hadoop/hadoop-hdfs.install |     11 -
 .../src/deb/hadoop/hadoop-hdfs.postinst            |     41 -
 bigtop-packages/src/deb/hadoop/hadoop-hdfs.preinst |     63 -
 bigtop-packages/src/deb/hadoop/hadoop-httpfs.dirs  |      3 -
 .../src/deb/hadoop/hadoop-httpfs.install           |      5 -
 .../src/deb/hadoop/hadoop-httpfs.postinst          |     39 -
 .../src/deb/hadoop/hadoop-httpfs.preinst           |     61 -
 bigtop-packages/src/deb/hadoop/hadoop-httpfs.prerm |     58 -
 bigtop-packages/src/deb/hadoop/hadoop-kms.dirs     |      3 -
 bigtop-packages/src/deb/hadoop/hadoop-kms.install  |      5 -
 bigtop-packages/src/deb/hadoop/hadoop-kms.postinst |     40 -
 bigtop-packages/src/deb/hadoop/hadoop-kms.preinst  |     61 -
 bigtop-packages/src/deb/hadoop/hadoop-kms.prerm    |     58 -
 .../src/deb/hadoop/hadoop-mapreduce.dirs           |      6 -
 .../src/deb/hadoop/hadoop-mapreduce.install        |     11 -
 .../src/deb/hadoop/hadoop-mapreduce.postinst       |     41 -
 .../src/deb/hadoop/hadoop-mapreduce.preinst        |     62 -
 .../src/deb/hadoop/hadoop-native.lintian-overrides |      1 -
 bigtop-packages/src/deb/hadoop/hadoop-yarn.dirs    |      6 -
 bigtop-packages/src/deb/hadoop/hadoop-yarn.install |     11 -
 .../src/deb/hadoop/hadoop-yarn.postinst            |     43 -
 bigtop-packages/src/deb/hadoop/hadoop-yarn.preinst |     63 -
 .../src/deb/hadoop/hadoop.daemon.postinst.tpl      |     25 -
 bigtop-packages/src/deb/hadoop/hadoop.dirs         |      4 -
 bigtop-packages/src/deb/hadoop/hadoop.install      |     24 -
 .../src/deb/hadoop/hadoop.lintian-overrides        |      7 -
 bigtop-packages/src/deb/hadoop/hadoop.manpages     |      4 -
 bigtop-packages/src/deb/hadoop/hadoop.postinst     |     39 -
 bigtop-packages/src/deb/hadoop/hadoop.preinst      |     52 -
 bigtop-packages/src/deb/hadoop/hadoop.prerm        |     57 -
 .../src/deb/hadoop/libhdfs0-dev.install            |      1 -
 bigtop-packages/src/deb/hadoop/libhdfs0.dirs       |      1 -
 bigtop-packages/src/deb/hadoop/libhdfs0.install    |      1 -
 bigtop-packages/src/deb/hadoop/rules               |     93 -
 bigtop-packages/src/deb/hadoop/shlibs.local        |      1 -
 .../src/deb/hadoop/source.lintian-overrides        |      0
 bigtop-packages/src/deb/hadoop/source/format       |      1 -
 bigtop-packages/src/deb/hbase/compat               |      1 -
 bigtop-packages/src/deb/hbase/control              |     84 -
 bigtop-packages/src/deb/hbase/copyright            |     15 -
 bigtop-packages/src/deb/hbase/hbase-doc.dirs       |      1 -
 bigtop-packages/src/deb/hbase/hbase-doc.install    |      1 -
 bigtop-packages/src/deb/hbase/hbase.dirs           |      5 -
 bigtop-packages/src/deb/hbase/hbase.install        |      4 -
 bigtop-packages/src/deb/hbase/hbase.manpages       |      1 -
 bigtop-packages/src/deb/hbase/hbase.postinst       |     37 -
 bigtop-packages/src/deb/hbase/hbase.preinst        |     62 -
 bigtop-packages/src/deb/hbase/hbase.prerm          |     57 -
 .../src/deb/hbase/install_init_scripts.sh          |     37 -
 bigtop-packages/src/deb/hbase/rules                |     73 -
 bigtop-packages/src/deb/hbase/source/format        |      1 -
 .../src/deb/hive/build-hive-install-file.sh        |     41 -
 bigtop-packages/src/deb/hive/changelog             |      1 -
 bigtop-packages/src/deb/hive/compat                |      1 -
 bigtop-packages/src/deb/hive/control               |     88 -
 bigtop-packages/src/deb/hive/copyright             |     15 -
 bigtop-packages/src/deb/hive/hive-hbase.install    |      2 -
 bigtop-packages/src/deb/hive/hive-hcatalog.dirs    |      2 -
 bigtop-packages/src/deb/hive/hive-hcatalog.install |      9 -
 .../src/deb/hive/hive-hcatalog.postinst            |     35 -
 bigtop-packages/src/deb/hive/hive-hcatalog.prerm   |     57 -
 bigtop-packages/src/deb/hive/hive-jdbc.install     |      1 -
 bigtop-packages/src/deb/hive/hive-webhcat.install  |      4 -
 bigtop-packages/src/deb/hive/hive-webhcat.postinst |     34 -
 bigtop-packages/src/deb/hive/hive-webhcat.prerm    |     57 -
 bigtop-packages/src/deb/hive/hive.dirs             |      3 -
 bigtop-packages/src/deb/hive/hive.install.include  |      9 -
 bigtop-packages/src/deb/hive/hive.postinst         |     50 -
 bigtop-packages/src/deb/hive/hive.preinst          |     53 -
 bigtop-packages/src/deb/hive/hive.prerm            |     45 -
 bigtop-packages/src/deb/hive/rules                 |     61 -
 bigtop-packages/src/deb/hive/source/format         |      1 -
 bigtop-packages/src/deb/ignite-hadoop/compat       |      1 -
 bigtop-packages/src/deb/ignite-hadoop/control      |     39 -
 bigtop-packages/src/deb/ignite-hadoop/copyright    |     15 -
 bigtop-packages/src/deb/ignite-hadoop/format       |      1 -
 .../src/deb/ignite-hadoop/ignite-hadoop-doc.dirs   |      1 -
 .../src/deb/ignite-hadoop/ignite-hadoop.dirs       |      3 -
 .../src/deb/ignite-hadoop/ignite-hadoop.install    |      3 -
 .../src/deb/ignite-hadoop/ignite-hadoop.manpages   |      1 -
 .../src/deb/ignite-hadoop/ignite-hadoop.postinst   |     47 -
 .../src/deb/ignite-hadoop/ignite-hadoop.preinst    |     62 -
 .../src/deb/ignite-hadoop/ignite-hadoop.prerm      |     55 -
 bigtop-packages/src/deb/ignite-hadoop/rules        |     46 -
 .../src/deb/ignite-hadoop/source/format            |      1 -
 bigtop-packages/src/deb/kafka/changelog            |      1 -
 bigtop-packages/src/deb/kafka/compat               |      1 -
 bigtop-packages/src/deb/kafka/control              |     34 -
 bigtop-packages/src/deb/kafka/copyright            |     11 -
 .../src/deb/kafka/kafka-server.postinst            |     32 -
 bigtop-packages/src/deb/kafka/kafka-server.postrm  |     32 -
 bigtop-packages/src/deb/kafka/kafka.postinst       |     35 -
 bigtop-packages/src/deb/kafka/kafka.preinst        |     33 -
 bigtop-packages/src/deb/kafka/kafka.prerm          |     32 -
 bigtop-packages/src/deb/kafka/rules                |     38 -
 bigtop-packages/src/deb/kafka/source/format        |      1 -
 bigtop-packages/src/deb/kibana/compat              |      1 -
 bigtop-packages/src/deb/kibana/control             |     30 -
 bigtop-packages/src/deb/kibana/copyright           |     15 -
 bigtop-packages/src/deb/kibana/kibana.install      |      3 -
 bigtop-packages/src/deb/kibana/kibana.postinst     |     39 -
 bigtop-packages/src/deb/kibana/kibana.postrm       |     51 -
 bigtop-packages/src/deb/kibana/kibana.preinst      |     67 -
 bigtop-packages/src/deb/kibana/kibana.prerm        |     38 -
 bigtop-packages/src/deb/kibana/rules               |     39 -
 bigtop-packages/src/deb/kibana/source/format       |      1 -
 bigtop-packages/src/deb/livy/changelog             |      1 -
 bigtop-packages/src/deb/livy/compat                |      1 -
 bigtop-packages/src/deb/livy/control               |     28 -
 bigtop-packages/src/deb/livy/copyright             |     15 -
 bigtop-packages/src/deb/livy/livy.dirs             |      4 -
 bigtop-packages/src/deb/livy/livy.install          |      5 -
 bigtop-packages/src/deb/livy/livy.postinst         |     37 -
 bigtop-packages/src/deb/livy/livy.preinst          |     62 -
 bigtop-packages/src/deb/livy/livy.prerm            |     38 -
 bigtop-packages/src/deb/livy/rules                 |     40 -
 bigtop-packages/src/deb/livy/source/format         |      1 -
 bigtop-packages/src/deb/logstash/compat            |      1 -
 bigtop-packages/src/deb/logstash/control           |     31 -
 bigtop-packages/src/deb/logstash/copyright         |     15 -
 bigtop-packages/src/deb/logstash/format            |      1 -
 bigtop-packages/src/deb/logstash/logstash.install  |      3 -
 bigtop-packages/src/deb/logstash/logstash.postinst |     42 -
 bigtop-packages/src/deb/logstash/logstash.postrm   |     52 -
 bigtop-packages/src/deb/logstash/logstash.preinst  |     65 -
 bigtop-packages/src/deb/logstash/logstash.prerm    |     55 -
 bigtop-packages/src/deb/logstash/rules             |     38 -
 bigtop-packages/src/deb/logstash/source/format     |      1 -
 bigtop-packages/src/deb/mahout/changelog           |      1 -
 bigtop-packages/src/deb/mahout/compat              |      1 -
 bigtop-packages/src/deb/mahout/control             |     50 -
 bigtop-packages/src/deb/mahout/copyright           |     15 -
 bigtop-packages/src/deb/mahout/mahout-doc.dirs     |      1 -
 bigtop-packages/src/deb/mahout/mahout-doc.install  |      3 -
 bigtop-packages/src/deb/mahout/mahout.dirs         |      3 -
 bigtop-packages/src/deb/mahout/mahout.install      |      3 -
 bigtop-packages/src/deb/mahout/mahout.postinst     |     34 -
 bigtop-packages/src/deb/mahout/mahout.prerm        |     38 -
 bigtop-packages/src/deb/mahout/rules               |     39 -
 bigtop-packages/src/deb/mahout/source/format       |      1 -
 bigtop-packages/src/deb/oozie/changelog            |      1 -
 bigtop-packages/src/deb/oozie/compat               |      1 -
 bigtop-packages/src/deb/oozie/control              |     75 -
 bigtop-packages/src/deb/oozie/copyright            |     15 -
 bigtop-packages/src/deb/oozie/oozie.dirs           |      1 -
 bigtop-packages/src/deb/oozie/oozie.postinst       |     59 -
 bigtop-packages/src/deb/oozie/oozie.postrm         |     52 -
 bigtop-packages/src/deb/oozie/oozie.preinst        |     63 -
 bigtop-packages/src/deb/oozie/oozie.prerm          |     55 -
 bigtop-packages/src/deb/oozie/rules                |     61 -
 bigtop-packages/src/deb/oozie/source/format        |      1 -
 bigtop-packages/src/deb/phoenix/changelog          |      1 -
 bigtop-packages/src/deb/phoenix/compat             |      1 -
 bigtop-packages/src/deb/phoenix/control            |     42 -
 bigtop-packages/src/deb/phoenix/copyright          |     11 -
 bigtop-packages/src/deb/phoenix/phoenix.dirs       |      1 -
 bigtop-packages/src/deb/phoenix/phoenix.install    |      2 -
 bigtop-packages/src/deb/phoenix/phoenix.postinst   |     35 -
 bigtop-packages/src/deb/phoenix/phoenix.preinst    |     53 -
 bigtop-packages/src/deb/phoenix/phoenix.prerm      |     57 -
 bigtop-packages/src/deb/phoenix/rules              |     51 -
 bigtop-packages/src/deb/phoenix/source/format      |      1 -
 bigtop-packages/src/deb/qfs/changelog              |      1 -
 bigtop-packages/src/deb/qfs/compat                 |      1 -
 bigtop-packages/src/deb/qfs/control                |     99 -
 bigtop-packages/src/deb/qfs/copyright              |     15 -
 bigtop-packages/src/deb/qfs/qfs-chunkserver.dirs   |      2 -
 .../src/deb/qfs/qfs-chunkserver.install            |      3 -
 .../src/deb/qfs/qfs-chunkserver.lintian-overrides  |      2 -
 .../src/deb/qfs/qfs-chunkserver.postinst           |     34 -
 bigtop-packages/src/deb/qfs/qfs-chunkserver.postrm |     32 -
 .../src/deb/qfs/qfs-chunkserver.preinst            |     34 -
 bigtop-packages/src/deb/qfs/qfs-client.install     |     31 -
 .../src/deb/qfs/qfs-client.lintian-overrides       |      3 -
 bigtop-packages/src/deb/qfs/qfs-dev.install        |     27 -
 .../src/deb/qfs/qfs-dev.lintian-overrides          |      2 -
 bigtop-packages/src/deb/qfs/qfs-fuse.install       |      1 -
 .../src/deb/qfs/qfs-fuse.lintian-overrides         |      2 -
 bigtop-packages/src/deb/qfs/qfs-hadoop.install     |      1 -
 .../src/deb/qfs/qfs-hadoop.lintian-overrides       |      2 -
 bigtop-packages/src/deb/qfs/qfs-java.install       |      1 -
 .../src/deb/qfs/qfs-java.lintian-overrides         |      1 -
 bigtop-packages/src/deb/qfs/qfs-metaserver.dirs    |      2 -
 bigtop-packages/src/deb/qfs/qfs-metaserver.install |      4 -
 .../src/deb/qfs/qfs-metaserver.lintian-overrides   |      2 -
 .../src/deb/qfs/qfs-metaserver.postinst            |     34 -
 bigtop-packages/src/deb/qfs/qfs-metaserver.postrm  |     32 -
 bigtop-packages/src/deb/qfs/qfs-metaserver.preinst |     34 -
 bigtop-packages/src/deb/qfs/qfs-python.install     |      1 -
 .../src/deb/qfs/qfs-python.lintian-overrides       |      1 -
 bigtop-packages/src/deb/qfs/qfs-webui.dirs         |      2 -
 bigtop-packages/src/deb/qfs/qfs-webui.install      |      2 -
 .../src/deb/qfs/qfs-webui.lintian-overrides        |      1 -
 bigtop-packages/src/deb/qfs/qfs-webui.postinst     |     34 -
 bigtop-packages/src/deb/qfs/qfs-webui.postrm       |     32 -
 bigtop-packages/src/deb/qfs/qfs-webui.preinst      |     34 -
 bigtop-packages/src/deb/qfs/qfs.lintian-overrides  |      2 -
 bigtop-packages/src/deb/qfs/rules                  |     84 -
 bigtop-packages/src/deb/qfs/source/format          |      1 -
 bigtop-packages/src/deb/solr/changelog             |      1 -
 bigtop-packages/src/deb/solr/compat                |      1 -
 bigtop-packages/src/deb/solr/control               |     55 -
 bigtop-packages/src/deb/solr/copyright             |     15 -
 bigtop-packages/src/deb/solr/rules                 |     44 -
 bigtop-packages/src/deb/solr/solr-doc.install      |      3 -
 bigtop-packages/src/deb/solr/solr-server.install   |      1 -
 bigtop-packages/src/deb/solr/solr-server.postinst  |     25 -
 bigtop-packages/src/deb/solr/solr.install          |      5 -
 bigtop-packages/src/deb/solr/solr.postinst         |     35 -
 bigtop-packages/src/deb/solr/solr.preinst          |     62 -
 bigtop-packages/src/deb/solr/solr.prerm            |     38 -
 bigtop-packages/src/deb/solr/source/format         |      1 -
 bigtop-packages/src/deb/spark/changelog            |      1 -
 bigtop-packages/src/deb/spark/compat               |      1 -
 bigtop-packages/src/deb/spark/control              |     82 -
 bigtop-packages/src/deb/spark/copyright            |     15 -
 bigtop-packages/src/deb/spark/rules                |     53 -
 bigtop-packages/src/deb/spark/source/format        |      1 -
 bigtop-packages/src/deb/spark/spark-core.install   |     31 -
 bigtop-packages/src/deb/spark/spark-core.postinst  |     35 -
 bigtop-packages/src/deb/spark/spark-core.preinst   |     63 -
 bigtop-packages/src/deb/spark/spark-core.prerm     |     38 -
 .../src/deb/spark/spark-datanucleus.install        |      2 -
 .../src/deb/spark/spark-external.install           |      1 -
 bigtop-packages/src/deb/spark/spark-python.install |      4 -
 bigtop-packages/src/deb/spark/spark-sparkr.install |      3 -
 .../src/deb/spark/spark-yarn-shuffle.install       |      2 -
 bigtop-packages/src/deb/sqoop/changelog            |      1 -
 bigtop-packages/src/deb/sqoop/compat               |      1 -
 bigtop-packages/src/deb/sqoop/control              |     35 -
 bigtop-packages/src/deb/sqoop/copyright            |     15 -
 bigtop-packages/src/deb/sqoop/rules                |     39 -
 bigtop-packages/src/deb/sqoop/source/format        |      1 -
 .../src/deb/sqoop/sqoop-metastore.postinst         |     35 -
 bigtop-packages/src/deb/sqoop/sqoop.postinst       |     39 -
 bigtop-packages/src/deb/sqoop/sqoop.preinst        |     64 -
 bigtop-packages/src/deb/sqoop/sqoop.prerm          |     57 -
 bigtop-packages/src/deb/sqoop2/changelog           |      1 -
 bigtop-packages/src/deb/sqoop2/compat              |      2 -
 bigtop-packages/src/deb/sqoop2/control             |     39 -
 bigtop-packages/src/deb/sqoop2/copyright           |     11 -
 bigtop-packages/src/deb/sqoop2/rules               |     44 -
 bigtop-packages/src/deb/sqoop2/source/format       |      1 -
 .../src/deb/sqoop2/sqoop2-client.install           |      6 -
 .../src/deb/sqoop2/sqoop2-server.install           |      1 -
 .../src/deb/sqoop2/sqoop2-server.postinst          |     21 -
 bigtop-packages/src/deb/sqoop2/sqoop2.install      |     11 -
 bigtop-packages/src/deb/sqoop2/sqoop2.postinst     |     21 -
 bigtop-packages/src/deb/sqoop2/sqoop2.preinst      |     65 -
 bigtop-packages/src/deb/sqoop2/sqoop2.prerm        |     46 -
 bigtop-packages/src/deb/tez/compat                 |      1 -
 bigtop-packages/src/deb/tez/control                |     29 -
 bigtop-packages/src/deb/tez/copyright              |     16 -
 bigtop-packages/src/deb/tez/rules                  |     50 -
 bigtop-packages/src/deb/tez/source/format          |      1 -
 bigtop-packages/src/deb/tez/tez.install            |      3 -
 bigtop-packages/src/deb/ycsb/changelog             |      1 -
 bigtop-packages/src/deb/ycsb/compat                |      1 -
 bigtop-packages/src/deb/ycsb/control               |     30 -
 bigtop-packages/src/deb/ycsb/copyright             |     15 -
 bigtop-packages/src/deb/ycsb/rules                 |     35 -
 bigtop-packages/src/deb/ycsb/source/format         |      1 -
 bigtop-packages/src/deb/ycsb/ycsb.install          |      2 -
 bigtop-packages/src/deb/zeppelin/changelog         |      1 -
 bigtop-packages/src/deb/zeppelin/compat            |      1 -
 bigtop-packages/src/deb/zeppelin/control           |     31 -
 bigtop-packages/src/deb/zeppelin/copyright         |     15 -
 bigtop-packages/src/deb/zeppelin/rules             |     42 -
 bigtop-packages/src/deb/zeppelin/source/format     |      1 -
 bigtop-packages/src/deb/zeppelin/zeppelin.dirs     |      4 -
 bigtop-packages/src/deb/zeppelin/zeppelin.install  |      6 -
 bigtop-packages/src/deb/zeppelin/zeppelin.postinst |     42 -
 bigtop-packages/src/deb/zeppelin/zeppelin.preinst  |     63 -
 bigtop-packages/src/deb/zeppelin/zeppelin.prerm    |     38 -
 bigtop-packages/src/deb/zookeeper/changelog        |      1 -
 bigtop-packages/src/deb/zookeeper/compat           |      1 -
 bigtop-packages/src/deb/zookeeper/control          |     46 -
 bigtop-packages/src/deb/zookeeper/copyright        |     15 -
 bigtop-packages/src/deb/zookeeper/rules            |     51 -
 bigtop-packages/src/deb/zookeeper/source/format    |      1 -
 .../src/deb/zookeeper/zookeeper-server.init        |    191 -
 .../src/deb/zookeeper/zookeeper.postinst           |     40 -
 .../src/deb/zookeeper/zookeeper.preinst            |     64 -
 bigtop-packages/src/deb/zookeeper/zookeeper.prerm  |     47 -
 bigtop-packages/src/extensions/aws-maven.xml       |     26 -
 bigtop-packages/src/rpm/alluxio/SPECS/alluxio.spec |    148 -
 bigtop-packages/src/rpm/ambari/RPMS/.gitignore     |      1 -
 bigtop-packages/src/rpm/ambari/SPECS/.gitignore    |      3 -
 bigtop-packages/src/rpm/ambari/SPECS/ambari.spec   |    510 -
 .../src/rpm/bigtop-groovy/SPECS/.gitignore         |      0
 .../src/rpm/bigtop-groovy/SPECS/bigtop-groovy.spec |     60 -
 bigtop-packages/src/rpm/bigtop-jsvc/.gitignore     |      0
 .../src/rpm/bigtop-jsvc/RPMS/.gitignore            |      0
 .../src/rpm/bigtop-jsvc/SPECS/.gitignore           |      0
 .../src/rpm/bigtop-jsvc/SPECS/bigtop-jsvc.spec     |     67 -
 .../src/rpm/bigtop-jsvc/SRPMS/.gitignore           |      0
 .../src/rpm/bigtop-tomcat/BUILD/.gitignore         |      0
 .../src/rpm/bigtop-tomcat/RPMS/.gitignore          |      0
 .../src/rpm/bigtop-tomcat/SOURCES/.gitignore       |      0
 .../src/rpm/bigtop-tomcat/SPECS/bigtop-tomcat.spec |     60 -
 .../src/rpm/bigtop-tomcat/SRPMS/.gitignore         |      0
 .../src/rpm/bigtop-utils/RPMS/.gitignore           |      1 -
 .../src/rpm/bigtop-utils/SPECS/.gitignore          |      1 -
 .../src/rpm/bigtop-utils/SPECS/bigtop-utils.spec   |     84 -
 .../src/rpm/elasticsearch/SPECS/elasticsearch.spec |    112 -
 bigtop-packages/src/rpm/flink/SPECS/flink.spec     |    165 -
 bigtop-packages/src/rpm/flume/RPMS/.gitignore      |      0
 bigtop-packages/src/rpm/flume/SPECS/flume.spec     |    194 -
 bigtop-packages/src/rpm/flume/SRPMS/.gitignore     |      0
 bigtop-packages/src/rpm/giraph/BUILD/.gitignore    |      0
 bigtop-packages/src/rpm/giraph/RPMS/.gitignore     |      0
 bigtop-packages/src/rpm/giraph/SOURCES/.gitignore  |      0
 bigtop-packages/src/rpm/giraph/SPECS/giraph.spec   |    111 -
 bigtop-packages/src/rpm/giraph/SRPMS/.gitignore    |      0
 bigtop-packages/src/rpm/gpdb/BUILD/.gitignore      |      0
 bigtop-packages/src/rpm/gpdb/RPMS/.gitignore       |      0
 bigtop-packages/src/rpm/gpdb/SOURCES/.gitignore    |      0
 bigtop-packages/src/rpm/gpdb/SPECS/gpdb.spec       |     66 -
 bigtop-packages/src/rpm/gpdb/SRPMS/.gitignore      |      0
 bigtop-packages/src/rpm/hadoop/RPMS/.gitignore     |      1 -
 bigtop-packages/src/rpm/hadoop/SOURCES/.gitignore  |      0
 bigtop-packages/src/rpm/hadoop/SPECS/.gitignore    |      3 -
 bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec   |    818 -
 bigtop-packages/src/rpm/hbase/RPMS/.gitignore      |      1 -
 bigtop-packages/src/rpm/hbase/SOURCES/.gitignore   |      0
 bigtop-packages/src/rpm/hbase/SPECS/.gitignore     |      3 -
 bigtop-packages/src/rpm/hbase/SPECS/hbase.spec     |    392 -
 bigtop-packages/src/rpm/hbase/SRPMS/.gitignore     |      0
 bigtop-packages/src/rpm/hive/RPMS/.gitignore       |      1 -
 bigtop-packages/src/rpm/hive/SPECS/.gitignore      |      3 -
 bigtop-packages/src/rpm/hive/SPECS/hive.spec       |    398 -
 .../src/rpm/ignite-hadoop/SPECS/ignite-hadoop.spec |    225 -
 bigtop-packages/src/rpm/kafka/SPECS/kafka.spec     |    171 -
 bigtop-packages/src/rpm/kibana/SPECS/kibana.spec   |     98 -
 bigtop-packages/src/rpm/livy/SPECS/livy.spec       |    130 -
 .../src/rpm/logstash/SPECS/logstash.spec           |    112 -
 bigtop-packages/src/rpm/mahout/BUILD/.gitignore    |      0
 bigtop-packages/src/rpm/mahout/RPMS/.gitignore     |      0
 bigtop-packages/src/rpm/mahout/SOURCES/.gitignore  |      0
 bigtop-packages/src/rpm/mahout/SPECS/mahout.spec   |    111 -
 bigtop-packages/src/rpm/mahout/SRPMS/.gitignore    |      0
 bigtop-packages/src/rpm/oozie/RPMS/.gitignore      |      0
 bigtop-packages/src/rpm/oozie/SOURCES/.gitignore   |      0
 bigtop-packages/src/rpm/oozie/SPECS/oozie.spec     |    232 -
 bigtop-packages/src/rpm/oozie/SRPMS/.gitignore     |      0
 bigtop-packages/src/rpm/phoenix/RPMS/.gitignore    |      1 -
 bigtop-packages/src/rpm/phoenix/SOURCES/.gitignore |      0
 bigtop-packages/src/rpm/phoenix/SPECS/.gitignore   |      3 -
 bigtop-packages/src/rpm/phoenix/SPECS/phoenix.spec |    184 -
 bigtop-packages/src/rpm/phoenix/SRPMS/.gitignore   |      0
 bigtop-packages/src/rpm/qfs/SPECS/qfs.spec         |    353 -
 bigtop-packages/src/rpm/solr/BUILD/.gitignore      |      0
 bigtop-packages/src/rpm/solr/RPMS/.gitignore       |      0
 bigtop-packages/src/rpm/solr/SOURCES/.gitignore    |      0
 .../src/rpm/solr/SOURCES/solr-server.init          |    188 -
 bigtop-packages/src/rpm/solr/SPECS/solr.spec       |    169 -
 bigtop-packages/src/rpm/solr/SRPMS/.gitignore      |      0
 bigtop-packages/src/rpm/spark/BUILD/.gitignore     |      0
 bigtop-packages/src/rpm/spark/RPMS/.gitignore      |      0
 bigtop-packages/src/rpm/spark/SOURCES/.gitignore   |      0
 bigtop-packages/src/rpm/spark/SPECS/spark.spec     |    287 -
 bigtop-packages/src/rpm/spark/SRPMS/.gitignore     |      0
 bigtop-packages/src/rpm/sqoop/BUILD/.gitignore     |      2 -
 bigtop-packages/src/rpm/sqoop/RPMS/.gitignore      |      0
 bigtop-packages/src/rpm/sqoop/SOURCES/.gitignore   |      0
 bigtop-packages/src/rpm/sqoop/SPECS/sqoop.spec     |    188 -
 bigtop-packages/src/rpm/sqoop/SRPMS/.gitignore     |      0
 bigtop-packages/src/rpm/sqoop2/BUILD/.gitignore    |      2 -
 bigtop-packages/src/rpm/sqoop2/RPMS/.gitignore     |      0
 bigtop-packages/src/rpm/sqoop2/SOURCES/.gitignore  |      0
 bigtop-packages/src/rpm/sqoop2/SPECS/sqoop2.spec   |    203 -
 bigtop-packages/src/rpm/sqoop2/SRPMS/.gitignore    |      0
 bigtop-packages/src/rpm/tez/RPMS/.gitignore        |      1 -
 bigtop-packages/src/rpm/tez/SPECS/tez.spec         |    120 -
 bigtop-packages/src/rpm/ycsb/BUILD/.gitignore      |      0
 bigtop-packages/src/rpm/ycsb/RPMS/.gitignore       |      0
 bigtop-packages/src/rpm/ycsb/SOURCES/.gitignore    |      0
 bigtop-packages/src/rpm/ycsb/SPECS/ycsb.spec       |     60 -
 bigtop-packages/src/rpm/ycsb/SRPMS/.gitignore      |      0
 .../src/rpm/zeppelin/SPECS/zeppelin.spec           |    138 -
 bigtop-packages/src/rpm/zookeeper/.gitignore       |      0
 bigtop-packages/src/rpm/zookeeper/RPMS/.gitignore  |      1 -
 bigtop-packages/src/rpm/zookeeper/SPECS/.gitignore |      3 -
 .../src/rpm/zookeeper/SPECS/zookeeper.spec         |    251 -
 bigtop-packages/src/rpm/zookeeper/SRPMS/.gitignore |      0
 bigtop-packages/src/scripts/maven_deploy.sh        |     41 -
 bigtop-packages/src/templates/init.d.tmpl          |    314 -
 bigtop-test-framework/README                       |     84 -
 bigtop-test-framework/pom.xml                      |    160 -
 .../groovy/org/apache/bigtop/itest/Contract.java   |     40 -
 .../org/apache/bigtop/itest/JUnitUtils.groovy      |     54 -
 .../org/apache/bigtop/itest/JarContent.groovy      |    249 -
 .../org/apache/bigtop/itest/LogErrorsUtils.groovy  |     34 -
 .../org/apache/bigtop/itest/ParameterSetter.java   |    182 -
 .../groovy/org/apache/bigtop/itest/Property.java   |     46 -
 .../org/apache/bigtop/itest/TestListUtils.groovy   |     51 -
 .../org/apache/bigtop/itest/TestUtils.groovy       |     91 -
 .../groovy/org/apache/bigtop/itest/Variable.java   |     34 -
 .../bigtop/itest/failures/AbstractFailure.groovy   |    192 -
 .../bigtop/itest/failures/FailureConstants.groovy  |     38 -
 .../bigtop/itest/failures/FailureExecutor.groovy   |     96 -
 .../bigtop/itest/failures/FailureVars.groovy       |    189 -
 .../itest/failures/NetworkShutdownFailure.groovy   |     79 -
 .../itest/failures/ServiceKilledFailure.groovy     |     70 -
 .../itest/failures/ServiceRestartFailure.groovy    |     70 -
 .../org/apache/bigtop/itest/junit/Ordered.java     |     83 -
 .../bigtop/itest/junit/OrderedParameterized.java   |    187 -
 .../itest/pmanager/AptCmdLinePackageManager.groovy |     99 -
 .../apache/bigtop/itest/pmanager/DEBPackage.groovy |    118 -
 .../bigtop/itest/pmanager/ManagedPackage.groovy    |     35 -
 .../bigtop/itest/pmanager/PackageInstance.groovy   |    117 -
 .../bigtop/itest/pmanager/PackageManager.groovy    |    233 -
 .../apache/bigtop/itest/pmanager/RPMPackage.groovy |    141 -
 .../pmanager/UrpmiCmdLinePackageManager.groovy     |     76 -
 .../itest/pmanager/YumCmdLinePackageManager.groovy |     79 -
 .../pmanager/ZypperCmdLinePackageManager.groovy    |     79 -
 .../apache/bigtop/itest/posix/Alternative.groovy   |    204 -
 .../org/apache/bigtop/itest/posix/Service.groovy   |    107 -
 .../org/apache/bigtop/itest/posix/UGI.groovy       |     78 -
 .../apache/bigtop/itest/shell/JUnitShell.groovy    |     86 -
 .../groovy/org/apache/bigtop/itest/shell/OS.groovy |     42 -
 .../org/apache/bigtop/itest/shell/Shell.groovy     |    143 -
 .../org/apache/bigtop/itest/DummyTestError.groovy  |     28 -
 .../org/apache/bigtop/itest/DummyTestFail.groovy   |     29 -
 .../org/apache/bigtop/itest/DummyTestPass.groovy   |     27 -
 .../org/apache/bigtop/itest/JUnitUtilsTest.groovy  |     66 -
 .../org/apache/bigtop/itest/JarContentTest.groovy  |    175 -
 .../apache/bigtop/itest/TestContractGroovy.groovy  |     87 -
 .../org/apache/bigtop/itest/TestContractJava.java  |     88 -
 .../apache/bigtop/itest/TestContractJavaProc.java  |     90 -
 .../apache/bigtop/itest/TestListUtilsTest.groovy   |     42 -
 .../failures/IntegrationTestClusterFailures.groovy |    170 -
 .../itest/junit/OrderedParameterizedTest.groovy    |     66 -
 .../apache/bigtop/itest/junit/OrderedTest.groovy   |     54 -
 .../itest/pmanager/PackageManagerTest.groovy       |    141 -
 .../bigtop/itest/posix/AlternativeTest.groovy      |     42 -
 .../apache/bigtop/itest/posix/ServiceTest.groovy   |     44 -
 .../org/apache/bigtop/itest/posix/UGITest.groovy   |     38 -
 .../org/apache/bigtop/itest/shell/ShellTest.groovy |     52 -
 bigtop-tests/build.gradle                          |     67 -
 bigtop-tests/cloud-weather-report/README.md        |     43 -
 .../cloud-weather-report/hadoop-processing.yaml    |      7 -
 bigtop-tests/smoke-tests/README                    |     99 -
 .../smoke-tests/alluxio/TestAlluxioSmoke.groovy    |     80 -
 bigtop-tests/smoke-tests/alluxio/build.gradle      |     31 -
 bigtop-tests/smoke-tests/alluxio/datafile          |      1 -
 bigtop-tests/smoke-tests/alluxio/log4j.properties  |     24 -
 .../smoke-tests/ambari/TestAmbariSimple.groovy     |     77 -
 bigtop-tests/smoke-tests/ambari/build.gradle       |     32 -
 bigtop-tests/smoke-tests/build.gradle              |     86 -
 .../smoke-tests/elasticsearch/build.gradle         |     43 -
 bigtop-tests/smoke-tests/flink/TestFlink.groovy    |     89 -
 bigtop-tests/smoke-tests/flink/build.gradle        |     34 -
 bigtop-tests/smoke-tests/flink/test.data           |     11 -
 bigtop-tests/smoke-tests/flume/TestFlumeNG.groovy  |    112 -
 bigtop-tests/smoke-tests/flume/build.gradle        |     35 -
 bigtop-tests/smoke-tests/flume/conf/flume.conf     |     35 -
 bigtop-tests/smoke-tests/flume/log4j.properties    |     24 -
 bigtop-tests/smoke-tests/giraph/build.gradle       |     37 -
 bigtop-tests/smoke-tests/gpdb/TestGpdb.groovy      |     57 -
 bigtop-tests/smoke-tests/gpdb/build.gradle         |     32 -
 bigtop-tests/smoke-tests/hbase/build.gradle        |     81 -
 bigtop-tests/smoke-tests/hcfs/build.gradle         |     72 -
 bigtop-tests/smoke-tests/hdfs/build.gradle         |     87 -
 .../smoke-tests/hive/TestHiveSimple.groovy         |     59 -
 bigtop-tests/smoke-tests/hive/build.gradle         |     43 -
 bigtop-tests/smoke-tests/hive/log4j.properties     |     24 -
 bigtop-tests/smoke-tests/hive/passwd.ql            |      6 -
 .../ignite-hadoop/TestIgniteHadoop.groovy          |     72 -
 .../smoke-tests/ignite-hadoop/build.gradle         |     34 -
 .../smoke-tests/ignite-hadoop/conf/core-site.xml   |     90 -
 .../ignite-hadoop/conf/log4j.properties            |     24 -
 .../smoke-tests/ignite-hadoop/conf/mapred-site.xml |     66 -
 bigtop-tests/smoke-tests/ignite-hadoop/test.data   |     11 -
 .../smoke-tests/kafka/TestKafkaSmoke.groovy        |     65 -
 bigtop-tests/smoke-tests/kafka/build.gradle        |     35 -
 .../smoke-tests/kibana/TestKibanaSmoke.groovy      |     51 -
 bigtop-tests/smoke-tests/kibana/build.gradle       |     35 -
 bigtop-tests/smoke-tests/kms/build.gradle          |     52 -
 bigtop-tests/smoke-tests/livy/TestLivy.groovy      |    112 -
 bigtop-tests/smoke-tests/livy/build.gradle         |     38 -
 .../smoke-tests/logger-test-config/build.gradle    |     20 -
 .../src/main/resources/log4j.properties            |     20 -
 .../smoke-tests/logstash/TestLogstashSmoke.groovy  |     46 -
 bigtop-tests/smoke-tests/logstash/build.gradle     |     35 -
 .../smoke-tests/logstash/resources/generator.conf  |      7 -
 bigtop-tests/smoke-tests/mahout/build.gradle       |     35 -
 bigtop-tests/smoke-tests/mahout/log4j.properties   |     24 -
 bigtop-tests/smoke-tests/mapreduce/build.gradle    |     39 -
 bigtop-tests/smoke-tests/odpi-runtime/README.md    |     35 -
 bigtop-tests/smoke-tests/odpi-runtime/build.gradle |     63 -
 .../org/odpi/specs/runtime/hadoop/ApiExaminer.java |    491 -
 .../org/odpi/specs/runtime/hive/HCatalogMR.java    |    138 -
 .../src/main/resources/api-examiner-prep.sh        |     70 -
 .../org/odpi/specs/runtime/TestSpecsRuntime.groovy |    275 -
 .../org/odpi/specs/runtime/hive/HiveHelper.java    |    121 -
 .../org/odpi/specs/runtime/hive/JdbcConnector.java |     79 -
 .../org/odpi/specs/runtime/hive/TestBeeline.java   |    227 -
 .../java/org/odpi/specs/runtime/hive/TestCLI.java  |    236 -
 .../org/odpi/specs/runtime/hive/TestHCatalog.java  |    158 -
 .../java/org/odpi/specs/runtime/hive/TestJdbc.java |    547 -
 .../java/org/odpi/specs/runtime/hive/TestSql.java  |    337 -
 .../org/odpi/specs/runtime/hive/TestThrift.java    |    251 -
 .../src/test/python/find-public-apis.py            |     80 -
 .../resources/hadoop-common-2.7.3-api-report.json  |      1 -
 .../src/test/resources/hadoop-common-bin.list      |      2 -
 .../src/test/resources/hadoop-common-jar.list      |     60 -
 .../src/test/resources/hadoop-common.list          |    230 -
 .../resources/hadoop-hdfs-2.7.3-api-report.json    |      1 -
 .../src/test/resources/hadoop-hdfs-bin.list        |      1 -
 .../src/test/resources/hadoop-hdfs-jar.list        |     25 -
 .../src/test/resources/hadoop-hdfs.list            |     79 -
 .../src/test/resources/hadoop-mapreduce-bin.list   |      1 -
 ...oop-mapreduce-client-core-2.7.3-api-report.json |      1 -
 .../src/test/resources/hadoop-mapreduce-jar.list   |     22 -
 .../src/test/resources/hadoop-mapreduce.list       |    123 -
 .../src/test/resources/hadoop-subprojs.list        |      4 -
 .../hadoop-yarn-api-2.7.3-api-report.json          |      1 -
 .../src/test/resources/hadoop-yarn-bin.list        |      3 -
 .../hadoop-yarn-client-2.7.3-api-report.json       |      1 -
 .../hadoop-yarn-common-2.7.3-api-report.json       |      1 -
 .../src/test/resources/hadoop-yarn-jar.list        |     38 -
 .../src/test/resources/hadoop-yarn.list            |     74 -
 .../src/test/resources/testRuntimeSpecConf.groovy  |    430 -
 bigtop-tests/smoke-tests/oozie/build.gradle        |     46 -
 bigtop-tests/smoke-tests/phoenix/STOCK_SYMBOL.csv  |      9 -
 bigtop-tests/smoke-tests/phoenix/STOCK_SYMBOL.sql  |     24 -
 .../phoenix/TestPhoenixQueryServer.groovy          |     57 -
 bigtop-tests/smoke-tests/phoenix/build.gradle      |     33 -
 bigtop-tests/smoke-tests/phoenix/log4j.properties  |     24 -
 .../smoke-tests/phoenix/smoke-test-teardown.sql    |      1 -
 bigtop-tests/smoke-tests/phoenix/smoke-test.sql    |      1 -
 bigtop-tests/smoke-tests/qfs/build.gradle          |     38 -
 bigtop-tests/smoke-tests/run_itest.sh              |    284 -
 bigtop-tests/smoke-tests/solr/build.gradle         |     60 -
 bigtop-tests/smoke-tests/spark/TestSpark.groovy    |    115 -
 bigtop-tests/smoke-tests/spark/build.gradle        |     33 -
 .../smoke-tests/sqoop/TestSqoopETLHsql.groovy      |    160 -
 bigtop-tests/smoke-tests/sqoop/build.gradle        |     39 -
 bigtop-tests/smoke-tests/sqoop/log4j.properties    |     24 -
 bigtop-tests/smoke-tests/tez/TestTezSmoke.groovy   |     86 -
 bigtop-tests/smoke-tests/tez/build.gradle          |     35 -
 bigtop-tests/smoke-tests/yarn/build.gradle         |     51 -
 bigtop-tests/smoke-tests/ycsb/TestYcsbSmoke.groovy |     99 -
 bigtop-tests/smoke-tests/ycsb/build.gradle         |     35 -
 .../smoke-tests/zeppelin/TestZeppelinSmoke.groovy  |     50 -
 bigtop-tests/smoke-tests/zeppelin/build.gradle     |     35 -
 .../smoke-tests/zookeeper/TestZookeeper.groovy     |     66 -
 bigtop-tests/smoke-tests/zookeeper/build.gradle    |     32 -
 bigtop-tests/test-artifacts/README                 |     32 -
 bigtop-tests/test-artifacts/elasticsearch/pom.xml  |     39 -
 .../itest/elasticsearch/smoke/TestBase.groovy      |     46 -
 .../itest/elasticsearch/smoke/TestDocument.groovy  |     72 -
 .../itest/elasticsearch/smoke/TestInfo.groovy      |     43 -
 bigtop-tests/test-artifacts/fatjar/pom.xml         |    141 -
 .../fatjar/src/main/resources/log4j.properties     |     20 -
 bigtop-tests/test-artifacts/flume/pom.xml          |     43 -
 .../bigtop/itest/flumesmoke/TestFlumeSmoke.groovy  |     81 -
 .../main/resources/FlumeSmokeBzip2/flume-site.xml  |     25 -
 .../resources/FlumeSmokeDeflate/flume-site.xml     |     25 -
 .../main/resources/FlumeSmokeGzip/flume-site.xml   |     25 -
 .../flume/src/main/resources/events.txt            |  10000 --
 bigtop-tests/test-artifacts/giraph/pom.xml         |     33 -
 .../itest/giraphsmoke/TestGiraphSmoke.groovy       |     77 -
 bigtop-tests/test-artifacts/hadoop/README          |     24 -
 bigtop-tests/test-artifacts/hadoop/pom.xml         |     58 -
 .../bigtop/itest/hadoop/hcfs/FSCmdExecutor.java    |     73 -
 .../apache/bigtop/itest/hadoop/hcfs/TestCLI.java   |    165 -
 .../bigtop/itest/hadoop/hcfs/TestFuseHCFS.groovy   |    323 -
 .../itest/hadoop/hdfs/CommonFunctions.groovy       |     37 -
 .../itest/hadoop/hdfs/TestBlockRecovery.groovy     |    216 -
 .../bigtop/itest/hadoop/hdfs/TestChgrp.groovy      |    249 -
 .../bigtop/itest/hadoop/hdfs/TestCmdTest.groovy    |    321 -
 .../bigtop/itest/hadoop/hdfs/TestCmdText.groovy    |    195 -
 .../bigtop/itest/hadoop/hdfs/TestCount.groovy      |    230 -
 .../apache/bigtop/itest/hadoop/hdfs/TestCp.groovy  |    300 -
 .../bigtop/itest/hadoop/hdfs/TestDFSAdmin.groovy   |    154 -
 .../bigtop/itest/hadoop/hdfs/TestDFSCLI.java       |    109 -
 .../itest/hadoop/hdfs/TestDistCpIntra.groovy       |    165 -
 .../apache/bigtop/itest/hadoop/hdfs/TestDu.groovy  |    323 -
 .../bigtop/itest/hadoop/hdfs/TestFileAppend.groovy |    253 -
 .../bigtop/itest/hadoop/hdfs/TestFsck.groovy       |     51 -
 .../apache/bigtop/itest/hadoop/hdfs/TestGet.groovy |    242 -
 .../itest/hadoop/hdfs/TestHDFSBalancer.groovy      |    160 -
 .../bigtop/itest/hadoop/hdfs/TestHDFSCLI.java      |     32 -
 .../bigtop/itest/hadoop/hdfs/TestHDFSQuota.groovy  |    297 -
 .../apache/bigtop/itest/hadoop/hdfs/TestLs.groovy  |    234 -
 .../bigtop/itest/hadoop/hdfs/TestMkdir.groovy      |    166 -
 .../apache/bigtop/itest/hadoop/hdfs/TestMv.groovy  |    288 -
 .../apache/bigtop/itest/hadoop/hdfs/TestPut.groovy |    191 -
 .../bigtop/itest/hadoop/hdfs/TestStat.groovy       |    221 -
 .../bigtop/itest/hadoop/hdfs/TestTextSnappy.groovy |     62 -
 .../bigtop/itest/hadoop/hdfs/TestTouchz.groovy     |    172 -
 .../bigtop/itest/hadoop/hdfs/TestWebHDFS.groovy    |    545 -
 .../apache/bigtop/itest/hadoop/kms/TestKms.groovy  |     61 -
 .../hadoop/mapreduce/TestHadoopExamples.groovy     |    176 -
 .../itest/hadoop/mapreduce/TestHadoopSmoke.groovy  |     98 -
 .../bigtop/itest/hadoop/yarn/TestNode.groovy       |     44 -
 .../bigtop/itest/hadoop/yarn/TestRmAdmin.groovy    |     78 -
 .../hadoop/src/main/resources/cachedir.jar         |    Bin 585 -> 0 bytes
 .../src/main/resources/clitest_data/data120bytes   |      8 -
 .../src/main/resources/clitest_data/data15bytes    |      1 -
 .../hadoop/src/main/resources/clitest_data/data1k  |     71 -
 .../src/main/resources/clitest_data/data30bytes    |      2 -
 .../src/main/resources/clitest_data/data60bytes    |      4 -
 .../main/resources/clitest_data/testDFSConf.xml    |    252 -
 .../main/resources/clitest_data/testHCFSConf.xml   |  12588 --
 .../main/resources/clitest_data/testHDFSConf.xml   |    430 -
 .../src/main/resources/examples/ints/file1.txt     |  11000 --
 .../src/main/resources/examples/ints/file2.txt     |  11000 --
 .../src/main/resources/examples/text/pg11.txt      |   3735 -
 .../src/main/resources/examples/text/pg2265.txt    |   5302 -
 .../hadoop/src/main/resources/input.txt            |      2 -
 .../hadoop/src/main/resources/map.sh               |     18 -
 .../hadoop/src/main/resources/part-00001.snappy    |    Bin 50 -> 0 bytes
 .../hadoop/src/main/resources/test_data/test.zip   |    Bin 346 -> 0 bytes
 .../hadoop/src/main/resources/test_data/test_1.txt |      4 -
 .../hadoop/src/main/resources/test_data/test_2.txt |      4 -
 .../hadoop/src/main/resources/test_data/test_3     |   3321 -
 bigtop-tests/test-artifacts/hbase/pom.xml          |     65 -
 .../itest/hbase/smoke/IncrementalPELoad.java       |    119 -
 .../bigtop/itest/hbase/smoke/TestCopyTable.java    |    112 -
 .../itest/hbase/smoke/TestHBaseBalancer.groovy     |     69 -
 .../itest/hbase/smoke/TestHBaseCompression.groovy  |     91 -
 .../itest/hbase/smoke/TestHBaseImportExport.groovy |    119 -
 .../itest/hbase/smoke/TestHBasePigSmoke.groovy     |    123 -
 .../bigtop/itest/hbase/smoke/TestHBaseSmoke.java   |     85 -
 .../itest/hbase/smoke/TestHFileOutputFormat.java   |    216 -
 .../bigtop/itest/hbase/smoke/TestHbck.groovy       |     49 -
 .../bigtop/itest/hbase/smoke/TestImportTsv.groovy  |    232 -
 .../hbase/smoke/TestLoadIncrementalHFiles.java     |    116 -
 .../apache/bigtop/itest/hbase/system/Putter.java   |    126 -
 .../apache/bigtop/itest/hbase/system/Scanner.java  |    140 -
 .../hbase/system/TestConcurrentScanAndPut.java     |    166 -
 .../itest/hbase/system/TestLoadAndVerify.java      |    405 -
 .../bigtop/itest/hbase/system/TestRegionMover.java |    128 -
 .../bigtop/itest/hbase/util/HBaseTestUtil.java     |    140 -
 .../hbase/src/main/resources/movies.psv            |   1682 -
 .../hbase/src/main/resources/movies.tsv            |   1682 -
 bigtop-tests/test-artifacts/hcatalog/README        |     16 -
 bigtop-tests/test-artifacts/hcatalog/pom.xml       |     36 -
 .../itest/hcatalogsmoke/TestHcatalogBasic.groovy   |    144 -
 .../src/main/resources/data/data-2013-01-01.txt    |      4 -
 .../src/main/resources/data/data-2013-01-02.txt    |      3 -
 .../src/main/resources/hcat_basic_count.expected   |      1 -
 .../main/resources/hcat_basic_describe.expected    |      3 -
 .../main/resources/hcat_basic_partitions.expected  |      2 -
 bigtop-tests/test-artifacts/httpfs/pom.xml         |     38 -
 .../apache/bigtop/itest/httpfs/TestHttpFs.groovy   |    225 -
 .../src/main/resources/text-files/helloworld.txt   |      1 -
 bigtop-tests/test-artifacts/hue/pom.xml            |     33 -
 .../bigtop/itest/huesmoke/TestHueSmoke.groovy      |     66 -
 bigtop-tests/test-artifacts/longevity/pom.xml      |     41 -
 .../org/apache/bigtop/itest/iolongevity/.gitignore |      0
 .../bigtop/itest/iolongevity/TestDFSIO.groovy      |    117 -
 .../bigtop/itest/iolongevity/TestSLive.groovy      |    142 -
 bigtop-tests/test-artifacts/mahout/pom.xml         |     33 -
 .../itest/mahout/smoke/TestMahoutExamples.groovy   |    325 -
 bigtop-tests/test-artifacts/oozie/pom.xml          |     39 -
 .../bigtop/itest/ooziesmoke/TestOozieSmoke.groovy  |    141 -
 bigtop-tests/test-artifacts/package/pom.xml        |     78 -
 .../bigtop/itest/packagesmoke/BTServices.groovy    |    107 -
 .../itest/packagesmoke/PackageTestCommon.groovy    |    519 -
 .../itest/packagesmoke/PackageTestErrorProxy.java  |     40 -
 .../itest/packagesmoke/PackageTestMatchers.java    |     85 -
 .../itest/packagesmoke/PackageTestRepoMgr.groovy   |     91 -
 .../bigtop/itest/packagesmoke/StateVerifier.groovy |     30 -
 .../itest/packagesmoke/StateVerifierFlume.groovy   |     50 -
 .../itest/packagesmoke/StateVerifierHBase.groovy   |     47 -
 .../itest/packagesmoke/StateVerifierHDFS.groovy    |     73 -
 .../itest/packagesmoke/StateVerifierHive.groovy    |     43 -
 .../itest/packagesmoke/StateVerifierHue.groovy     |     57 -
 .../packagesmoke/StateVerifierMapreduce.groovy     |     37 -
 .../itest/packagesmoke/StateVerifierOozie.groovy   |     50 -
 .../itest/packagesmoke/StateVerifierSqoop.groovy   |     39 -
 .../packagesmoke/StateVerifierZookeeper.groovy     |     36 -
 .../itest/packagesmoke/TestPackagesBasics.groovy   |    277 -
 .../packagesmoke/TestPackagesBasicsWithRM.groovy   |     36 -
 .../TestPackagesPseudoDistributed.groovy           |     60 -
 .../TestPackagesPseudoDistributedDependency.groovy |     57 -
 ...estPackagesPseudoDistributedFileContents.groovy |     58 -
 .../TestPackagesPseudoDistributedServices.groovy   |     57 -
 .../TestPackagesPseudoDistributedState.groovy      |     29 -
 .../TestPackagesPseudoDistributedUpgrade.groovy    |     32 -
 .../TestPackagesPseudoDistributedWithRM.groovy     |     45 -
 .../bigtop/itest/packagesmoke/TestServices.groovy  |    138 -
 .../packagesmoke/TestServicesCreateState.groovy    |     39 -
 .../TestServicesCreateStateMissing.groovy          |     31 -
 .../packagesmoke/TestServicesVerifyState.groovy    |     33 -
 .../package/src/main/resources/apt/bigtop-jsvc.xml |     18 -
 .../src/main/resources/apt/bigtop-tomcat.xml       |     67 -
 .../src/main/resources/apt/bigtop-utils.xml        |     16 -
 .../package/src/main/resources/apt/flume-agent.xml |     14 -
 .../package/src/main/resources/apt/flume.xml       |    107 -
 .../package/src/main/resources/apt/giraph.xml      |      5 -
 .../src/main/resources/apt/hadoop-client.xml       |     48 -
 .../src/main/resources/apt/hadoop-conf-pseudo.xml  |     20 -
 .../src/main/resources/apt/hadoop-datanode.xml     |     15 -
 .../package/src/main/resources/apt/hadoop-doc.xml  |   2574 -
 .../main/resources/apt/hadoop-hdfs-datanode.xml    |     10 -
 .../src/main/resources/apt/hadoop-hdfs-fuse.xml    |     22 -
 .../main/resources/apt/hadoop-hdfs-namenode.xml    |     10 -
 .../apt/hadoop-hdfs-secondarynamenode.xml          |     10 -
 .../src/main/resources/apt/hadoop-hdfs-zkfc.xml    |     10 -
 .../package/src/main/resources/apt/hadoop-hdfs.xml |     62 -
 .../src/main/resources/apt/hadoop-httpfs.xml       |    234 -
 .../src/main/resources/apt/hadoop-jobtracker.xml   |     15 -
 .../apt/hadoop-mapreduce-historyserver.xml         |     10 -
 .../src/main/resources/apt/hadoop-mapreduce.xml    |     67 -
 .../src/main/resources/apt/hadoop-namenode.xml     |     15 -
 .../src/main/resources/apt/hadoop-native.xml       |     25 -
 .../src/main/resources/apt/hadoop-pipes.xml        |     21 -
 .../resources/apt/hadoop-secondarynamenode.xml     |     15 -
 .../src/main/resources/apt/hadoop-source.xml       |   3908 -
 .../src/main/resources/apt/hadoop-tasktracker.xml  |     15 -
 .../main/resources/apt/hadoop-yarn-nodemanager.xml |     10 -
 .../main/resources/apt/hadoop-yarn-proxyserver.xml |     10 -
 .../resources/apt/hadoop-yarn-resourcemanager.xml  |     10 -
 .../package/src/main/resources/apt/hadoop-yarn.xml |     67 -
 .../package/src/main/resources/apt/hadoop.xml      |    109 -
 .../package/src/main/resources/apt/hbase-doc.xml   |   2534 -
 .../src/main/resources/apt/hbase-master.xml        |     17 -
 .../src/main/resources/apt/hbase-regionserver.xml  |     17 -
 .../package/src/main/resources/apt/hbase-rest.xml  |     17 -
 .../src/main/resources/apt/hbase-thrift.xml        |     17 -
 .../package/src/main/resources/apt/hbase.xml       |    198 -
 .../package/src/main/resources/apt/hive-jdbc.xml   |     15 -
 .../src/main/resources/apt/hive-metastore.xml      |     16 -
 .../package/src/main/resources/apt/hive-server.xml |     16 -
 .../package/src/main/resources/apt/hive.xml        |    308 -
 .../src/main/resources/apt/libhdfs0-dev.xml        |      5 -
 .../package/src/main/resources/apt/libhdfs0.xml    |      9 -
 .../package/src/main/resources/apt/mahout.xml      |    454 -
 .../src/main/resources/apt/oozie-client.xml        |   3576 -
 .../package/src/main/resources/apt/oozie.xml       |    154 -
 .../src/main/resources/apt/package_data.xml        |    560 -
 .../package/src/main/resources/apt/solr-doc.xml    |   2677 -
 .../package/src/main/resources/apt/solr-server.xml |     14 -
 .../package/src/main/resources/apt/solr.xml        |    484 -
 .../package/src/main/resources/apt/sqoop.xml       |    171 -
 .../package/src/main/resources/apt/whirr.xml       |   1289 -
 .../src/main/resources/apt/zookeeper-server.xml    |     14 -
 .../package/src/main/resources/apt/zookeeper.xml   |    388 -
 .../package/src/main/resources/package_data.xml    |   1043 -
 .../src/main/resources/urpmi/bigtop-jsvc.xml       |      5 -
 .../src/main/resources/urpmi/bigtop-tomcat.xml     |      5 -
 .../package/src/main/resources/urpmi/giraph.xml    |      5 -
 .../src/main/resources/urpmi/hadoop-client.xml     |      5 -
 .../src/main/resources/urpmi/hadoop-hdfs-fuse.xml  |      5 -
 .../src/main/resources/urpmi/hadoop-hdfs-zkfc.xml  |      5 -
 .../src/main/resources/urpmi/hadoop-libhdfs.xml    |      5 -
 .../src/main/resources/urpmi/hbase-rest.xml        |      5 -
 .../src/main/resources/urpmi/package_data.xml      |   7641 --
 .../package/src/main/resources/yum/bigtop-jsvc.xml |     11 -
 .../src/main/resources/yum/bigtop-tomcat.xml       |     60 -
 .../src/main/resources/yum/bigtop-utils.xml        |      9 -
 .../package/src/main/resources/yum/flume-agent.xml |      5 -
 .../package/src/main/resources/yum/flume.xml       |     97 -
 .../package/src/main/resources/yum/giraph.xml      |      5 -
 .../src/main/resources/yum/hadoop-client.xml       |     44 -
 .../src/main/resources/yum/hadoop-conf-pseudo.xml  |     12 -
 .../src/main/resources/yum/hadoop-datanode.xml     |      5 -
 .../src/main/resources/yum/hadoop-debuginfo.xml    |     22 -
 .../package/src/main/resources/yum/hadoop-doc.xml  |   2567 -
 .../main/resources/yum/hadoop-hdfs-datanode.xml    |      6 -
 .../src/main/resources/yum/hadoop-hdfs-fuse.xml    |      7 -
 .../main/resources/yum/hadoop-hdfs-namenode.xml    |      6 -
 .../yum/hadoop-hdfs-secondarynamenode.xml          |      6 -
 .../src/main/resources/yum/hadoop-hdfs-zkfc.xml    |      6 -
 .../package/src/main/resources/yum/hadoop-hdfs.xml |     48 -
 .../src/main/resources/yum/hadoop-httpfs.xml       |    222 -
 .../src/main/resources/yum/hadoop-jobtracker.xml   |      5 -
 .../src/main/resources/yum/hadoop-libhdfs.xml      |      8 -
 .../yum/hadoop-mapreduce-historyserver.xml         |      6 -
 .../src/main/resources/yum/hadoop-mapreduce.xml    |     52 -
 .../src/main/resources/yum/hadoop-namenode.xml     |      5 -
 .../src/main/resources/yum/hadoop-native.xml       |     11 -
 .../src/main/resources/yum/hadoop-pipes.xml        |     10 -
 .../resources/yum/hadoop-secondarynamenode.xml     |      5 -
 .../src/main/resources/yum/hadoop-source.xml       |   3934 -
 .../src/main/resources/yum/hadoop-tasktracker.xml  |      5 -
 .../main/resources/yum/hadoop-yarn-nodemanager.xml |      6 -
 .../main/resources/yum/hadoop-yarn-proxyserver.xml |      6 -
 .../resources/yum/hadoop-yarn-resourcemanager.xml  |      6 -
 .../package/src/main/resources/yum/hadoop-yarn.xml |     52 -
 .../package/src/main/resources/yum/hadoop.xml      |     92 -
 .../package/src/main/resources/yum/hbase-doc.xml   |   2528 -
 .../src/main/resources/yum/hbase-master.xml        |      5 -
 .../src/main/resources/yum/hbase-regionserver.xml  |      5 -
 .../package/src/main/resources/yum/hbase-rest.xml  |      5 -
 .../src/main/resources/yum/hbase-thrift.xml        |      5 -
 .../package/src/main/resources/yum/hbase.xml       |    178 -
 .../package/src/main/resources/yum/hive-jdbc.xml   |      7 -
 .../src/main/resources/yum/hive-metastore.xml      |      6 -
 .../package/src/main/resources/yum/hive-server.xml |      6 -
 .../package/src/main/resources/yum/hive.xml        |    427 -
 .../package/src/main/resources/yum/mahout.xml      |    444 -
 .../src/main/resources/yum/oozie-client.xml        |   3554 -
 .../package/src/main/resources/yum/oozie.xml       |    148 -
 .../src/main/resources/yum/package_data.xml        |    595 -
 .../package/src/main/resources/yum/solr-doc.xml    |   2671 -
 .../package/src/main/resources/yum/solr-server.xml |      5 -
 .../package/src/main/resources/yum/solr.xml        |    469 -
 .../package/src/main/resources/yum/sqoop.xml       |    157 -
 .../package/src/main/resources/yum/whirr.xml       |   1267 -
 .../src/main/resources/yum/zookeeper-server.xml    |      5 -
 .../package/src/main/resources/yum/zookeeper.xml   |    373 -
 .../src/main/resources/zypper/bigtop-jsvc.xml      |     11 -
 .../src/main/resources/zypper/bigtop-tomcat.xml    |     60 -
 .../src/main/resources/zypper/bigtop-utils.xml     |      8 -
 .../src/main/resources/zypper/flume-agent.xml      |      5 -
 .../package/src/main/resources/zypper/flume.xml    |     97 -
 .../package/src/main/resources/zypper/giraph.xml   |      5 -
 .../src/main/resources/zypper/hadoop-client.xml    |     44 -
 .../main/resources/zypper/hadoop-conf-pseudo.xml   |     12 -
 .../src/main/resources/zypper/hadoop-datanode.xml  |      5 -
 .../src/main/resources/zypper/hadoop-doc.xml       |   2567 -
 .../main/resources/zypper/hadoop-hdfs-datanode.xml |      6 -
 .../src/main/resources/zypper/hadoop-hdfs-fuse.xml |      7 -
 .../main/resources/zypper/hadoop-hdfs-namenode.xml |      6 -
 .../zypper/hadoop-hdfs-secondarynamenode.xml       |      6 -
 .../src/main/resources/zypper/hadoop-hdfs-zkfc.xml |      6 -
 .../src/main/resources/zypper/hadoop-hdfs.xml      |     48 -
 .../src/main/resources/zypper/hadoop-httpfs.xml    |    222 -
 .../main/resources/zypper/hadoop-jobtracker.xml    |      5 -
 .../src/main/resources/zypper/hadoop-libhdfs.xml   |      8 -
 .../zypper/hadoop-mapreduce-historyserver.xml      |      6 -
 .../src/main/resources/zypper/hadoop-mapreduce.xml |     52 -
 .../src/main/resources/zypper/hadoop-namenode.xml  |      5 -
 .../src/main/resources/zypper/hadoop-native.xml    |     11 -
 .../src/main/resources/zypper/hadoop-pipes.xml     |     10 -
 .../resources/zypper/hadoop-secondarynamenode.xml  |      5 -
 .../src/main/resources/zypper/hadoop-source.xml    |   3915 -
 .../main/resources/zypper/hadoop-tasktracker.xml   |      5 -
 .../resources/zypper/hadoop-yarn-nodemanager.xml   |      6 -
 .../resources/zypper/hadoop-yarn-proxyserver.xml   |      6 -
 .../zypper/hadoop-yarn-resourcemanager.xml         |      6 -
 .../src/main/resources/zypper/hadoop-yarn.xml      |     52 -
 .../package/src/main/resources/zypper/hadoop.xml   |     92 -
 .../src/main/resources/zypper/hbase-doc.xml        |   2528 -
 .../src/main/resources/zypper/hbase-master.xml     |      5 -
 .../main/resources/zypper/hbase-regionserver.xml   |      5 -
 .../src/main/resources/zypper/hbase-rest.xml       |      5 -
 .../src/main/resources/zypper/hbase-thrift.xml     |      5 -
 .../package/src/main/resources/zypper/hbase.xml    |    178 -
 .../src/main/resources/zypper/hive-jdbc.xml        |      7 -
 .../src/main/resources/zypper/hive-metastore.xml   |      6 -
 .../src/main/resources/zypper/hive-server.xml      |      6 -
 .../package/src/main/resources/zypper/hive.xml     |    347 -
 .../package/src/main/resources/zypper/mahout.xml   |    444 -
 .../src/main/resources/zypper/oozie-client.xml     |   3554 -
 .../package/src/main/resources/zypper/oozie.xml    |    148 -
 .../src/main/resources/zypper/package_data.xml     |    574 -
 .../package/src/main/resources/zypper/solr-doc.xml |   2671 -
 .../src/main/resources/zypper/solr-server.xml      |      5 -
 .../package/src/main/resources/zypper/solr.xml     |    469 -
 .../package/src/main/resources/zypper/sqoop.xml    |    158 -
 .../src/main/resources/zypper/zookeeper-server.xml |      5 -
 .../src/main/resources/zypper/zookeeper.xml        |    373 -
 bigtop-tests/test-artifacts/phoenix/pom.xml        |     33 -
 .../itest/phoenix/smoke/TestPhoenixSmoke.groovy    |     49 -
 bigtop-tests/test-artifacts/pom.xml                |    148 -
 bigtop-tests/test-artifacts/solr/pom.xml           |     44 -
 .../bigtop/itest/solr/smoke/SolrTestBase.groovy    |     85 -
 .../bigtop/itest/solr/smoke/TestIndexing.groovy    |     40 -
 .../itest/solr/smoke/TestIndexingSolrJ.groovy      |     94 -
 .../apache/bigtop/itest/solr/smoke/TestPing.groovy |     29 -
 .../bigtop/itest/solr/smoke/TestSimple.groovy      |     41 -
 .../bigtop/itest/solr/smoke/TestStatistics.groovy  |     38 -
 bigtop-tests/test-artifacts/spark/pom.xml          |     66 -
 .../bigtop/itest/spark/TestSparkExample.groovy     |     79 -
 .../bigtop/itest/spark/TestSparkSmoke.groovy       |     98 -
 .../spark/src/main/resources/kmeans_data.txt       |      6 -
 bigtop-tests/test-artifacts/sqoop/pom.xml          |     46 -
 .../sqoop/IntegrationTestSqoopHBase.groovy         |    114 -
 .../sqoop/IntegrationTestSqoopHive.groovy          |    100 -
 .../main/resources/hbase-sqoop/create-table.hxt    |      1 -
 .../src/main/resources/hbase-sqoop/drop-table.hxt  |      2 -
 .../hbase-sqoop/expected-hbase-output.txt          |     12 -
 .../main/resources/hbase-sqoop/mysql-create-db.sql |     38 -
 .../main/resources/hbase-sqoop/mysql-load-db.sql   |     38 -
 .../main/resources/hbase-sqoop/select-table.hxt    |      1 -
 .../resources/hive-sqoop/expected-hive-output.txt  |     12 -
 .../main/resources/hive-sqoop/hive-drop-table.hql  |     15 -
 .../resources/hive-sqoop/hive-select-table.hql     |     15 -
 .../main/resources/hive-sqoop/mysql-create-db.sql  |     38 -
 .../main/resources/hive-sqoop/mysql-load-db.sql    |     38 -
 .../main/resources/mysql-files/mysql-create-db.sql |     39 -
 .../resources/mysql-files/mysql-create-tables.sql  |     54 -
 .../resources/mysql-files/mysql-insert-data.sql    |     57 -
 .../resources/mysql-files/sqoop-all-tables.out     |      2 -
 .../main/resources/mysql-files/sqoop-append.out    |     20 -
 .../main/resources/mysql-files/sqoop-columns.out   |     10 -
 .../mysql-files/sqoop-null-non-string.out          |      3 -
 .../resources/mysql-files/sqoop-null-string.out    |      3 -
 .../src/main/resources/mysql-files/sqoop-query.out |      2 -
 .../resources/mysql-files/sqoop-t_bool-export.out  |      1 -
 .../main/resources/mysql-files/sqoop-t_bool.out    |      1 -
 .../resources/mysql-files/sqoop-t_date-export.out  |      1 -
 .../main/resources/mysql-files/sqoop-t_date.out    |      1 -
 .../src/main/resources/mysql-files/sqoop-t_fp.out  |      1 -
 .../src/main/resources/mysql-files/sqoop-t_int.out |      1 -
 .../main/resources/mysql-files/sqoop-t_string.out  |      1 -
 .../main/resources/mysql-files/sqoop-testtable.out |     10 -
 .../resources/mysql-files/sqoop-where-clause.out   |      4 -
 bigtop-tests/test-execution/README                 |     27 -
 bigtop-tests/test-execution/common/pom.xml         |    284 -
 .../test-execution/conf/log4j.configuration        |     20 -
 bigtop-tests/test-execution/conf/pom.xml           |     39 -
 .../org.apache.bigtop.itest.log4j.configuration    |     20 -
 bigtop-tests/test-execution/integration/pom.xml    |    114 -
 .../test-execution/integration/sqoop/pom.xml       |    109 -
 bigtop-tests/test-execution/longevity/io/pom.xml   |     74 -
 bigtop-tests/test-execution/longevity/pom.xml      |    108 -
 bigtop-tests/test-execution/package/pom.xml        |     76 -
 bigtop-tests/test-execution/pom.xml                |     39 -
 .../test-execution/smokes/elasticsearch/pom.xml    |     58 -
 bigtop-tests/test-execution/smokes/flume/pom.xml   |     57 -
 bigtop-tests/test-execution/smokes/giraph/pom.xml  |    102 -
 bigtop-tests/test-execution/smokes/hadoop/pom.xml  |    122 -
 bigtop-tests/test-execution/smokes/hbase/pom.xml   |    197 -
 .../test-execution/smokes/hcatalog/pom.xml         |     83 -
 bigtop-tests/test-execution/smokes/httpfs/pom.xml  |     97 -
 bigtop-tests/test-execution/smokes/hue/pom.xml     |     93 -
 bigtop-tests/test-execution/smokes/mahout/pom.xml  |     96 -
 bigtop-tests/test-execution/smokes/oozie/pom.xml   |     95 -
 bigtop-tests/test-execution/smokes/phoenix/pom.xml |     60 -
 bigtop-tests/test-execution/smokes/pom.xml         |    204 -
 bigtop-tests/test-execution/smokes/solr/pom.xml    |     95 -
 bigtop-tests/test-execution/smokes/spark/pom.xml   |    166 -
 bigtop-tests/test-execution/smokes/sqoop/pom.xml   |    103 -
 bigtop.bom                                         |    458 -
 src/site/resources/bigtop.rdf => bigtop.rdf        |      0
 bigtop_toolchain/README.md                         |    102 -
 bigtop_toolchain/bin/puppetize.sh                  |     81 -
 ...generic-GCC-support-for-atomic-operations.patch |    209 -
 .../puppet/parser/functions/latest_ant_binary.rb   |     22 -
 .../puppet/parser/functions/latest_maven_binary.rb |     22 -
 .../parser/functions/nearest_apache_mirror.rb      |     20 -
 bigtop_toolchain/manifests/ant.pp                  |     47 -
 bigtop_toolchain/manifests/cleanup.pp              |     33 -
 bigtop_toolchain/manifests/deployment_tools.pp     |     18 -
 bigtop_toolchain/manifests/development_tools.pp    |     18 -
 bigtop_toolchain/manifests/env.pp                  |     53 -
 bigtop_toolchain/manifests/gnupg.pp                |     36 -
 bigtop_toolchain/manifests/gradle.pp               |     34 -
 bigtop_toolchain/manifests/groovy.pp               |     34 -
 bigtop_toolchain/manifests/installer.pp            |     31 -
 bigtop_toolchain/manifests/jdk.pp                  |     66 -
 bigtop_toolchain/manifests/maven.pp                |     48 -
 bigtop_toolchain/manifests/packages.pp             |    271 -
 bigtop_toolchain/manifests/protobuf.pp             |     43 -
 bigtop_toolchain/manifests/puppet_modules.pp       |     43 -
 .../manifests/puppet_modules_prereq.pp             |     21 -
 bigtop_toolchain/manifests/renv.pp                 |     54 -
 bigtop_toolchain/manifests/user.pp                 |     47 -
 bigtop_toolchain/manifests/vagrant.pp              |     43 -
 bigtop_toolchain/templates/jenkins.sh              |     22 -
 build.gradle                                       |    552 -
 css/apache-maven-fluido-1.7.min.css                |     17 +
 .../site/resources/css => css}/freebsd_docbook.css |      0
 css/print.css                                      |     23 +
 {src/site/resources/css => css}/site.css           |      0
 dependencies.html                                  |    116 +
 dependency-info.html                               |    144 +
 dependency-management.html                         |    377 +
 distribution-management.html                       |    122 +
 docker/bigtop-puppet/build.sh                      |     55 -
 docker/bigtop-slaves/Dockerfile.template           |     26 -
 docker/bigtop-slaves/build.sh                      |     89 -
 docker/pseudo-cluster/Dockerfile                   |     31 -
 docker/pseudo-cluster/config/configure.sh          |     25 -
 docker/pseudo-cluster/config/hieradata/site.yaml   |     10 -
 docker/sandbox/README.md                           |    112 -
 docker/sandbox/build.sh                            |    232 -
 docker/sandbox/docker-compose.yml                  |     23 -
 docker/sandbox/sandbox-env.sh                      |     34 -
 docker/sandbox/site.yaml.template.centos-6_hadoop  |     19 -
 docker/sandbox/site.yaml.template.debian-8_hadoop  |     19 -
 docker/sandbox/startup.sh                          |     48 -
 docs/logo.jpg                                      |    Bin 8053 -> 0 bytes
 download.html                                      |    239 +
 fonts/glyphicons-halflings-regular.eot             |    Bin 0 -> 35283 bytes
 fonts/glyphicons-halflings-regular.svg             |    229 +
 fonts/glyphicons-halflings-regular.ttf             |    Bin 0 -> 55016 bytes
 fonts/glyphicons-halflings-regular.woff            |    Bin 0 -> 41793 bytes
 gradle/wrapper/gradle-wrapper.properties           |     20 -
 gradlew                                            |    220 -
 images/accessories-text-editor.png                 |    Bin 0 -> 746 bytes
 images/add.gif                                     |    Bin 0 -> 397 bytes
 .../images => images}/apache-incubator-logo.png    |    Bin
 images/apache-maven-project-2.png                  |    Bin 0 -> 43073 bytes
 images/application-certificate.png                 |    Bin 0 -> 923 bytes
 .../resources/images => images}/bigtop-logo.ai     |      0
 .../resources/images => images}/bigtop-logo.png    |    Bin
 images/close.gif                                   |    Bin 0 -> 279 bytes
 images/contact-new.png                             |    Bin 0 -> 736 bytes
 images/document-properties.png                     |    Bin 0 -> 577 bytes
 images/drive-harddisk.png                          |    Bin 0 -> 700 bytes
 images/fix.gif                                     |    Bin 0 -> 366 bytes
 images/icon_error_sml.gif                          |    Bin 0 -> 633 bytes
 images/icon_help_sml.gif                           |    Bin 0 -> 1072 bytes
 images/icon_info_sml.gif                           |    Bin 0 -> 638 bytes
 images/icon_success_sml.gif                        |    Bin 0 -> 604 bytes
 images/icon_warning_sml.gif                        |    Bin 0 -> 625 bytes
 images/image-x-generic.png                         |    Bin 0 -> 662 bytes
 images/internet-web-browser.png                    |    Bin 0 -> 1017 bytes
 images/logos/build-by-maven-black.png              |    Bin 0 -> 2294 bytes
 images/logos/build-by-maven-white.png              |    Bin 0 -> 2260 bytes
 images/logos/maven-feather.png                     |    Bin 0 -> 3330 bytes
 images/network-server.png                          |    Bin 0 -> 536 bytes
 images/package-x-generic.png                       |    Bin 0 -> 717 bytes
 images/profiles/pre-release.png                    |    Bin 0 -> 32607 bytes
 images/profiles/retired.png                        |    Bin 0 -> 22003 bytes
 images/profiles/sandbox.png                        |    Bin 0 -> 33010 bytes
 images/remove.gif                                  |    Bin 0 -> 607 bytes
 images/rss.png                                     |    Bin 0 -> 474 bytes
 images/update.gif                                  |    Bin 0 -> 1090 bytes
 images/window-new.png                              |    Bin 0 -> 583 bytes
 img/glyphicons-halflings-white.png                 |    Bin 0 -> 8777 bytes
 img/glyphicons-halflings.png                       |    Bin 0 -> 12799 bytes
 index.html                                         |    210 +
 integration.html                                   |    115 +
 irc-channel.html                                   |    128 +
 issue-tracking.html                                |    138 +
 js/apache-maven-fluido-1.7.min.js                  |     25 +
 .../common/bigtop-utils/LICENSE => license.html    |    161 +-
 mail-lists.html                                    |    283 +
 packages.gradle                                    |    884 -
 plugin-management.html                             |    251 +
 plugins.html                                       |    158 +
 pom.xml                                            |    474 -
 project-info.html                                  |    164 +
 project-summary.html                               |    160 +
 provisioner/docker/.gitignore                      |      3 -
 provisioner/docker/README.md                       |    148 -
 provisioner/docker/config.yaml                     |      1 -
 provisioner/docker/config_centos-7.yaml            |     24 -
 provisioner/docker/config_centos-8.yaml            |     24 -
 provisioner/docker/config_debian-10.yaml           |     24 -
 provisioner/docker/config_debian-9.yaml            |     24 -
 provisioner/docker/config_fedora-31.yaml           |     24 -
 provisioner/docker/config_ubuntu-16.04.yaml        |     24 -
 provisioner/docker/config_ubuntu-18.04.yaml        |     24 -
 provisioner/docker/docker-compose.yml              |     27 -
 provisioner/docker/docker-hadoop.sh                |    416 -
 provisioner/utils/setup-env-centos.sh              |     57 -
 provisioner/utils/setup-env-debian.sh              |     45 -
 provisioner/utils/smoke-tests.sh                   |     91 -
 provisioner/vagrant/.gitignore                     |      1 -
 provisioner/vagrant/README.md                      |    114 -
 provisioner/vagrant/Vagrantfile                    |    162 -
 provisioner/vagrant/config_hosts                   |     45 -
 provisioner/vagrant/create-sandboxes.sh            |     32 -
 provisioner/vagrant/gen_hosts.sh                   |     44 -
 provisioner/vagrant/vagrantconfig.yaml             |     25 -
 release-notes.html                                 |  10206 ++
 release.gradle                                     |     74 -
 settings.gradle                                    |     34 -
 source-repository.html                             |    130 +
 src/assembly/release-assembly.xml                  |     71 -
 src/site/site.xml                                  |     96 -
 src/site/xdoc/download.xml                         |    115 -
 src/site/xdoc/index.xml                            |     84 -
 src/site/xdoc/irc-channel.xml                      |     26 -
 src/site/xdoc/issue-tracking.xml                   |     33 -
 src/site/xdoc/mail-lists.xml                       |    140 -
 src/site/xdoc/release-notes.xml                    |   7494 -
 src/site/xdoc/team-list.xml                        |    222 -
 team-list.html                                     |    470 +
 test/MANIFEST.txt                                  |      8 -
 test/NOTICE.txt                                    |     15 -
 test/site/pom.xml                                  |     91 -
 test/site/src/site/apt/devguide.apt                |     32 -
 test/site/src/site/apt/downloads.apt               |     51 -
 test/site/src/site/apt/examples.apt                |    103 -
 test/site/src/site/apt/index.apt                   |     38 -
 test/site/src/site/apt/itest.apt                   |     63 -
 test/site/src/site/apt/userguide.apt               |     98 -
 test/site/src/site/fml/faq.fml                     |     72 -
 test/site/src/site/resources/images/banner.png     |    Bin 6965 -> 0 bytes
 test/site/src/site/resources/images/itest.png      |    Bin 3345 -> 0 bytes
 test/site/src/site/site.xml                        |     59 -
 2156 files changed, 13846 insertions(+), 503668 deletions(-)

diff --git a/BUILDING.txt b/BUILDING.txt
deleted file mode 100644
index 85ff0c2..0000000
--- a/BUILDING.txt
+++ /dev/null
@@ -1,104 +0,0 @@
-***************
-Building Bigtop
-***************
-
-Build Requirements:
-===================
-
-* A stock image of a Linux Distribution, supported are
-  - Debian 8
-  - Centos 6,7
-  - Ubuntu 14.04 LTS
-  - SLES 11 SP3
-  - Fedora 20
-  - openSuSE 13.1
-
-* A fast Internet Connection, since compile process will download a lot
-
-* Recommended is to install dependencies via puppet.
-  Either
-  - Read bigtop_toolchain/README.md
-  or
-  - install puppet and run
-    puppet apply --modulepath=<path_to_bigtop> -e "include bigtop_toolchain::installer"
-
-Building
-========
-
-For Debian, Ubuntu Bigtop supports the deb packaging format and apt repositories
-
-For Centos, Fedora Bigtop supports rpm packaging and yum repositories
-
-For openSuSE, SLES Bigtop supports rpm packaging but zypper seems not supported.
-
-Bigtop consists of many packages which should be compiled in order:
-See bigtop.mk for the list of packages and names.
-
-For doing a full recompile do a
-  gradle deb
-or
-  gradle rpm
-depending on Linux distribution.
-
-If you want to create a repository
-  gradle apt
-or
-  gradle yum
-
-Build results are in
-  output/<names>
-
-The $(HOME)/.m2 will contain several GB of downloads afterwards as collateral damage.
-
-Each package will have a "-1" release part by default.
-
-Create updated Packages
-=======================
-
-In order to create releases where one can update machines (for
-instance in production) it is advisable to increase the release part for every rebuild.
-
-This can be done by using the BIGTOP_BUILD_STAMP to set the release part of the version of every package.
-
-For instance by default a package hadoop-2.4.1-1.deb will be created. If you need to fix this package and recompile it run:
-
-BIGTOP_BUILD_STAMP="2" gradle hadoop-apt
-
-This updates the package to hadoop-2.4.1-2.deb in repository, removing the old one.
-
-Please do not use a hyphen "-" in the BIGTOP_BUILD_STAMP breaking the
-algorithms apt and yum are using for calculating dependencies.
-
-Integration into a CI system: jenkins
-=====================================
-
-
-A nightly build job can be created with
-
-  gradle clean
-  BIGTOP_BUILD_STAMP="nightly-${BUILD_NUMBER}" gradle apt|yum
-
-For more background information on BIGTOP_BUILD_STAMP see ticket BIGTOP-1580.
-
-
-Using a different JVM
-=====================
-
-By default, bigtop is compiled with a JDK version 8. If you want to change it,
-for example to use JDK version 7, you just have to define the variable
-BIGTOP_JDK:
-
-BIGTOP_JDK=7; export BIGTOP_JDK
-./gradlew deb
-
-
-Note on LC_ALL/LANG
-===================
-
-Compilation may fail if your locale is not in english. You might for example
-see some strange errors with dpkg-buildpackage. In that case, you should reset
-the language variables before compiling:
-
-LC_ALL=C; export LC_ALL
-LANG=C; export LANG
-./gradlew deb
diff --git a/CHANGES.txt b/CHANGES.txt
deleted file mode 100644
index a97f19b..0000000
--- a/CHANGES.txt
+++ /dev/null
@@ -1,2722 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-Bigtop Changelog
-
-* Release 1.5.0 (unreleased)
-
-* Release 1.4.0 (2019-03-24)
-
-
-Release Notes - Bigtop - Version 1.4.0
-
-** Sub-task
-    * [BIGTOP-2012] - Add new tests for webhdfs
-    * [BIGTOP-2993] - Switch to gradle XXX-pkg-ind feature developed in BIGTOP-2949 for Bigtop CI
-    * [BIGTOP-2994] - Document the new feature added in BIGTOP-2949
-    * [BIGTOP-2995] - Add example CI pipeline to build end-to-end from a commit to smoke test
-    * [BIGTOP-2996] - Update Smoke Test CI matrix against Bigtop master Distros
-    * [BIGTOP-3009] - Add gradle repo-ind
-    * [BIGTOP-3110] - [Puppet] Hive metastore can not startup successfully
-    * [BIGTOP-3114] - [Test] HBase Importtsv smoke tests are failing
-    * [BIGTOP-3115] - [Puppet] Alluxio master/worker can not startup successfully
-    * [BIGTOP-3117] - [Puppet] Can't run spark commands for Spark on Yarn deployment
-    * [BIGTOP-3118] - [Puppet] Failed to deploy GPDB
-    * [BIGTOP-3120] - [Puppet] Annoying warning message from Ambari snippet
-    * [BIGTOP-3121] - [Provisioner] Can't create docker containers after using Docker Provisioner on Amazon Linux
-    * [BIGTOP-3122] - CI matrix for smoke tests
-    * [BIGTOP-3124] - Remove package installation in provisioner/utils/smoke-tests.sh and polish the script
-    * [BIGTOP-3126] - [Puppet] Failed to deploy QFS due to Permission denied error at initailization
-    * [BIGTOP-3129] - Failed to run QFS smoke test
-    * [BIGTOP-3130] - Failed to run Alluxio smoke test
-    * [BIGTOP-3131] - Apex smoke test requires mvn to compile test jar
-    * [BIGTOP-3132] - Bump Alluxio up to 1.8.1
-    * [BIGTOP-3133] - [Puppet] Add a module to deploy bigtop-utils
-    * [BIGTOP-3134] - [Puppet] An upgrade to deploy Alluxio 1.8.1
-    * [BIGTOP-3136] - [Provisioner] Local built repo should have higher priority
-    * [BIGTOP-3138] - Maven installation failed on debian-9
-    * [BIGTOP-3139] - Failed to deploy Ambari on CentOS/Fedora cause no service command available
-    * [BIGTOP-3140] - [Test] TestHadoopExamples.groovy can have $USER = null when running in Docker
-    * [BIGTOP-3141] - Release number is not honored by the build system
-    * [BIGTOP-3142] - Better user expereince when calling docker-hadoop.sh -d with no cluster running
-    * [BIGTOP-3144] - [Test] QFS smoke test failed on multi-node cluster
-    * [BIGTOP-3146] - gradlew is not available in bigtop-tests/smoke-tests for running run_itest.sh
-    * [BIGTOP-3147] - Nexus proxy cache configuration is broken
-    * [BIGTOP-3148] - Error out when configure-nexus encounter any issue
-    * [BIGTOP-3149] - Support to enable Nexus proxy for Docker Provisioner
-    * [BIGTOP-3150] - [Provisioner] Raise proper exit code and error messages when provisioning failed
-    * [BIGTOP-3153] - Switch to docker cp based solution to build inside containers
-    * [BIGTOP-3154] - [Provisioner] Failed to bootstrap provisioning environment on Debian/Ubuntu
-    * [BIGTOP-3155] - Setup manual smoke test for Pull Requests
-    * [BIGTOP-3157] - Failed to start Ambari server and agent on Fedora 26
-    * [BIGTOP-3158] - [Test] Fix test specification with maven failsafe plugin
-    * [BIGTOP-3159] - Failed to deploy Alluxio due to Puppet compatibility issue
-    * [BIGTOP-3160] - [Provisioner] Treat both exit code 0 and 2 as success for Puppet execution
-    * [BIGTOP-3161] - Upgrade to Puppet 5.X
-    * [BIGTOP-3163] - Bump Hadoop to 2.8.5
-    * [BIGTOP-3164] - Bump Kafka to 1.1.1
-    * [BIGTOP-3166] - Update Kerberos module to comply with Puppet's syntax
-    * [BIGTOP-3167] - Nexus is always enabled for ./gradlew XXX-ind
-    * [BIGTOP-3169] - Enable local repo is not working on yum systems
-    * [BIGTOP-3170] - Loop and wait for daemon to be started up for Alluxio
-    * [BIGTOP-3171] - Update Kafka Puppet module for version 1.1.1
-    * [BIGTOP-3172] - [Provisioner] Support to specify components and smoke-tests when launching Docker Provisioner
-    * [BIGTOP-3173] - Fix bugs and improve usability of COMPONENT-pkg-ind
-    * [BIGTOP-3174] - ./gradlew repo-ind failed on Bigtop build slaves
-    * [BIGTOP-3175] - [Build] Support to specify commit SHA1 when building packages
-    * [BIGTOP-3176] - [Build] Support non X86 archs for COMPONENT-pkg-ind
-    * [BIGTOP-3177] - [Build] Exit w/o container left dangling when running docker in gradle wrapper
-    * [BIGTOP-3179] - Connection refused by www-us.apache.org mirror site
-    * [BIGTOP-3180] - Gradle 5 incompatible issue
-    * [BIGTOP-3181] - Build as non-root for COMPONENT-pkg-ind
-    * [BIGTOP-3182] - Helper message and documentation of new XXX-ind features
-    * [BIGTOP-3183] - Zeppelin build failed
-    * [BIGTOP-3185] - Bump Kafka to 2.1.1
-    * [BIGTOP-3186] - Bump Spark to 2.2.3
-    * [BIGTOP-3187] - Bump Flume to 1.9.0
-    * [BIGTOP-3189] - Allow to specify more properties/options at command line for docker provisioner
-    * [BIGTOP-3190] - Compatibility issue for Zeppelin with Spark 2.2.3
-    * [BIGTOP-3191] - Old Gradle version causes Kafka 2.1.1 build failed
-    * [BIGTOP-3192] - Remove Spark 1.6 from the stack
-    * [BIGTOP-3193] - Add REPO and MEMORY as options for docker-provisioner
-    * [BIGTOP-3195] - Add pkgs-ind to package all artifacts inside docker
-    * [BIGTOP-3196] - Drop Apache Crunch in Bigtop distribution
-    * [BIGTOP-3197] - [Docker] Treat both exit code 0 and 2 as success for Puppet execution
-    * [BIGTOP-3198] - [Sandbox] Support AARCH64/PPC64LE
-    * [BIGTOP-3199] - Can not run smoke test inside docker-provisioner
-    * [BIGTOP-3200] - Bugfix Integration test framework 2.0 and documentation
-    * [BIGTOP-3203] - [Sandbox] Architecture string is a mess for Distributions
-    * [BIGTOP-3206] - Missing /etc/init.d/functions when deploying Alluxio on Fedora 26
-    * [BIGTOP-3207] - [Provisioner] Do not exit when destroying got no cluster exists
-    * [BIGTOP-3209] - Revert Kafka to 0.10.2.2 and Flume to 1.8.0
-    * [BIGTOP-3210] - Missing /lib/lsb/init-functions when deploying Flink on Fedora 26
-    * [BIGTOP-3212] - Deploy QFS client failed due to no such directory error
-    * [BIGTOP-3213] - Bugs in pom.xml files when releasing Bigtop 1.4.0
-
-
-** Bug
-    * [BIGTOP-2595] - Make Spark1 and Spark2 coexist
-    * [BIGTOP-2892] - The download page should provide instructions to check hashes and signatures
-    * [BIGTOP-2986] - Oozie build is failing
-    * [BIGTOP-3099] - Fix oozie build
-    * [BIGTOP-3102] - docker-hadoop.sh WARNING: The scale command is deprecated
-    * [BIGTOP-3107] - QFS cannot compile with newer Oracle SDK
-    * [BIGTOP-3112] - Update release KEYS file from dist.apache.org to apache.org/dist
-    * [BIGTOP-3113] - Update previous releases links to the archive.apache.org in the download page
-    * [BIGTOP-3125] - [provisioner] sbin/init is missed in puppet:opensuse-42.3
-    * [BIGTOP-3151] - Add flink smoke test
-    * [BIGTOP-3184] - URLConnection check fails with exception when it should return false
-    * [BIGTOP-3194] - Phoneix QueryServer PID file name in svc file is inconsistent with that in QueryServer's execution script
-
-
-** New Feature
-    * [BIGTOP-2947] - Project Frontier: Bigtop Integration Test Framework 2.0
-
-
-** Improvement
-    * [BIGTOP-1227] - Deploy artifacts to maven repository
-    * [BIGTOP-3085] - Hello world example for adding a new package
-    * [BIGTOP-3135] - Remove deprecated giraph-site.xml
-    * [BIGTOP-3152] - No need to Force the use of Netty over Hadoop RPC in Giraph
-    * [BIGTOP-3204] - Upgrade flink from 1.4.2 to 1.6.0 or higher
-
-
-
-** Task
-    * [BIGTOP-3098] - Update maven plugins for project pom
-    * [BIGTOP-3105] - Sync up site changes from branch 1.3 to master
-    * [BIGTOP-3106] - Update links in download page
-    * [BIGTOP-3108] - Update version strings to 1.3.0 in the master
-    * [BIGTOP-3109] - Overhaul the deployment and testing modules
-    * [BIGTOP-3128] - Switch to new gitbox repository
-    * [BIGTOP-3162] - Define Bigtop 1.4.0 release BOM
-    * [BIGTOP-3178] - Fix two insecure maven repositories
-
-* Release 1.3.0 (2018-8-16)
-
-Release Notes - Bigtop - Version 1.3.0
-
-** Sub-task
-    * [BIGTOP-2833] - [puppetize.sh] Support centos-7-aarch64 support
-    * [BIGTOP-2893] - Bump Hadoop to 2.8.1
-    * [BIGTOP-2894] - Bump HBASE to 1.3.1
-    * [BIGTOP-2895] - Bump Pig to 0.17.0
-    * [BIGTOP-2897] - Bump Phoenix to  4.11-HBASE-1.3
-    * [BIGTOP-2898] - Bug: regression in Phoenix build after BIGTOP-2895
-    * [BIGTOP-2899] - Bug: regression in Oozie build after BIGTOP-2895
-    * [BIGTOP-2949] - Add gradle task which leverage bigtop-ci/build.sh to build packages
-
-
-** Bug
-    * [BIGTOP-2101] - ignite-hadoop contains an arch-dependent shared lib
-    * [BIGTOP-2213] - tez build downloads amd64 nodejs executable
-    * [BIGTOP-2292] - Remove Centos 6 Support
-    * [BIGTOP-2503] - Solr packaging is broken on both DEB and RPM
-    * [BIGTOP-2634] - package perl-Env does not exist in centos 6
-    * [BIGTOP-2679] - Streamline CI Jobs
-    * [BIGTOP-2723] - Fix asciidoctor-maven-plugin for HBase build on AArch64
-    * [BIGTOP-2737] - Spark charm doesn't handle HA or examples well
-    * [BIGTOP-2738] - spark-worker fails to start
-    * [BIGTOP-2748] - Fix a puppet compatibilty issue
-    * [BIGTOP-2749] - puppet: use jessie package on jessie, not trusty
-    * [BIGTOP-2750] - puppet: increase compatibility with future versions
-    * [BIGTOP-2753] - Initial support for Debian-9
-    * [BIGTOP-2754] - Revert BIGTOP-2730: Upgrade Zookeeper to version 3.4.10
-    * [BIGTOP-2804] - Drop Solr package and Puppet snippets
-    * [BIGTOP-2818] - Ambari downloads jdk
-    * [BIGTOP-2826] - Zeppelin RPM is broken: "missing" osgi package deps
-    * [BIGTOP-2829] - [iTest] build failed during Maven integration test phase
-    * [BIGTOP-2832] - Toolchain failed to install on Debian 8
-    * [BIGTOP-2835] - puppet fails when bigtop::jdk_preinstalled is true
-    * [BIGTOP-2836] - charm metric collector race condition
-    * [BIGTOP-2838] - Support ARM64 for  packages.gradle
-    * [BIGTOP-2841] - Failed to build bigtop/slaves-ubuntu-16.04-aarch64
-    * [BIGTOP-2844] - zeppelin charm: spark config should match zeppelin spark config
-    * [BIGTOP-2847] - Building from git is broken
-    * [BIGTOP-2850] - Not able to build bigtop/slaves image for OpenSuSE 42.1
-    * [BIGTOP-2859] - Missing HIVE_VERSION when build hive rpms
-    * [BIGTOP-2860] - Fix TestSpark.groovy syntax error
-    * [BIGTOP-2863] - Test build failed for incompatible JavaDoc format with JDK-8-121 and later
-    * [BIGTOP-2864] - Tez with "Class path contains multiple SLF4J bindings" warnings
-    * [BIGTOP-2866] - Fix rmr depricated and people* No such file or directory error
-    * [BIGTOP-2878] - Download task does not work for git repositories
-    * [BIGTOP-2881] - Greenpulm 5.0 got released - lets switch the Bigtop to it
-    * [BIGTOP-2885] - gpdb: disable gpcloud on CentOS 6
-    * [BIGTOP-2890] - Download page must not link to snapshots / nightly builds
-    * [BIGTOP-2891] - Download page must link to ASF mirrors
-    * [BIGTOP-2903] - update protobuf rpm links
-    * [BIGTOP-2906] - Failed to start solr-server service using bigtop-deploy
-    * [BIGTOP-2907] - upgrading Ambari from 2.5.0 to 2.5.2
-    * [BIGTOP-2908] - giraph charm: release audit failure
-    * [BIGTOP-2909] - ppc64le: Zeppelin 0.7.2 build is failing
-    * [BIGTOP-2911] - Change Solr service ports to default 8983/8984
-    * [BIGTOP-2912] - Intialize dataset for solr smoke test
-    * [BIGTOP-2915] - Some files are missing ALv2 header
-    * [BIGTOP-2917] - Ignite smoke test failed with JDK8
-    * [BIGTOP-2926] - introduce bigtop version into bigtop/puppet images
-    * [BIGTOP-2929] - Fix typo in opensuse version name
-    * [BIGTOP-2930] - sqoop fedora26 : rsync missing
-    * [BIGTOP-2931] - hadoop debian-9 : Unmet build dependencies: libssl-dev
-    * [BIGTOP-2932] - hadoop fedora26: openssl in native code problem
-    * [BIGTOP-2934] - Consolidate puppet usage by installing puppet from distro
-    * [BIGTOP-2935] - provisioner: Use proper command line depending on puppet version
-    * [BIGTOP-2942] - update tez to 0.9
-    * [BIGTOP-2943] - hbase does not compile with maven-3.5.2
-    * [BIGTOP-2944] - Update hbase and fix compilation issue
-    * [BIGTOP-2950] - BIGTOP-2869 breaks RAT check
-    * [BIGTOP-2953] - qfs is not buildable under Debian 9
-    * [BIGTOP-2957] - Upgrade YCSB to 0.12.0
-    * [BIGTOP-2958] - Tez should use appropriate profile for Hadoop shims
-    * [BIGTOP-2968] - Bump Hive version to 2.3.2
-    * [BIGTOP-2973] - Hama packaging is broken with Hadoop 2.8+
-    * [BIGTOP-2976] - zookeeper-rest missing dependency on lsb-base
-    * [BIGTOP-2979] - JAVA_HOME inconsistent on non-x86 architectures
-    * [BIGTOP-2980] - Hama does not build on DEB type systems
-    * [BIGTOP-2981] - Packaging SparkR is broken due to BIGTOP-2959
-    * [BIGTOP-2984] - Rat check failed after BIGTOP-2698
-    * [BIGTOP-2987] - Phoenix build break
-    * [BIGTOP-2990] - Upgrade Phoenix version to 4.13.1-HBase-1.3
-    * [BIGTOP-2991] - Bump up Spark version to 2.2.1
-    * [BIGTOP-3001] - Change uid and gid for jenkins user in bigtop-toolchain
-    * [BIGTOP-3003] - Ant-1.9.9 tarball is removed from Apache mirrors
-    * [BIGTOP-3004] - Fix HBase build failure on Debian/Fedora
-    * [BIGTOP-3013] - kafka charm: fail to deploy using Bigtop-trunk-repos
-    * [BIGTOP-3014] - juju: use charm-env for shebangs
-    * [BIGTOP-3018] - detect-javahome script has a typo in variable name
-    * [BIGTOP-3023] - Bump qfs to 2.0.0
-    * [BIGTOP-3024] - Zeppelin build is failed after Spark is bumpped to 2.2.1
-    * [BIGTOP-3025] - ci.bigtop.apache.org certificate is expired and causes errors
-    * [BIGTOP-3026] - 404 error: http://ci.bigtop.apache.org/view/Packages/job/Bigtop-trunk-packages/
-    * [BIGTOP-3027] - Wrong Leveldbjni native binary for aarch64
-    * [BIGTOP-3030] - Fix Ambari build failure on non-x86 platforms
-    * [BIGTOP-3033] - Spark build on OpenSUSE is failed
-    * [BIGTOP-3035] - Provisioner failed because init is missed in Debian-9
-    * [BIGTOP-3036] - Download gradle and groovy via TLS and do signature checking on ant
-    * [BIGTOP-3037] - Download maven securely
-    * [BIGTOP-3040] - Solr's default configsets doesn't work with smoke test cases
-    * [BIGTOP-3041] - Failed to init hadoop hdfs using init-hdfs.sh
-    * [BIGTOP-3042] - HDFS TestHDFSQuota doesn't match Hadoop's definition
-    * [BIGTOP-3044] - Fix docker build command for simple sh
-    * [BIGTOP-3045] - Remove insecure maven repository URL from gradle config
-    * [BIGTOP-3046] - Not correct auto find latest maven dist
-    * [BIGTOP-3048] - Revert BIGTOP-3001 to bring packaging CI back
-    * [BIGTOP-3050] - cgroups: cannot found cgroup mount destination: unknown
-    * [BIGTOP-3052] - Maven version is fixed in security verification code
-    * [BIGTOP-3053] - Tez failed to build due to bower version is deprecated
-    * [BIGTOP-3054] - Missing Spark archive caused Zeppelin build failure
-    * [BIGTOP-3061] - Crunch build failure
-    * [BIGTOP-3076] - QFS build failed on ppc64le
-    * [BIGTOP-3077] - OpenSuse-42.3 packages install error
-    * [BIGTOP-3078] - ignite-shmem failed to build on ppc64le
-    * [BIGTOP-3081] - Update HBase/Ignite-hadoop arch info
-    * [BIGTOP-3082] - Fix build failure with flume-1.8+kafka-0.10.2.2
-    * [BIGTOP-3083] - HBase and iginite-hadoop build failed on Fedora-26 due to BIGTOP-3081
-    * [BIGTOP-3088] - provisioner failed to run puppet deployment on CentOS-7
-    * [BIGTOP-3089] - Update provision config files with changes in 1.3.0 release
-    * [BIGTOP-3090] - provisioner failed on fedora-26 when deploying jdk
-    * [BIGTOP-3091] - Set Bigtop repo to higher priority
-
-
-** New Feature
-    * [BIGTOP-2868] - Travis integration for Jar files.
-    * [BIGTOP-3007] - expose hive config options for zookeeper
-
-
-** Improvement
-    * [BIGTOP-2698] - Build/install protobuf-2.5 from source
-    * [BIGTOP-2730] - Bump zookeeper to 3.4.10
-    * [BIGTOP-2784] - Bump version of Solr to 6.5.1
-    * [BIGTOP-2787] - [bigtop-slaves] Support of CentOS 7 ARM64 bigtop-slaves
-    * [BIGTOP-2794] - [bigtop-puppet] Support CentOS 7 AArch64 bigtop puppet
-    * [BIGTOP-2809] - Support R integration for Spark
-    * [BIGTOP-2810] - Support R integration for Zeppelin
-    * [BIGTOP-2811] - Add R to toolchain for Spark and Zeppelin
-    * [BIGTOP-2819] - Polish the README.md for 1.2.1 release
-    * [BIGTOP-2824] - [sandbox] Support CentOS 7 AArch64 sandbox
-    * [BIGTOP-2825] - Upgrade gradle to the latest 4.0
-    * [BIGTOP-2834] - spark charm: refactor for restricted networks; lib cleanup
-    * [BIGTOP-2839] - Bump Hadoop version to 2.7.4
-    * [BIGTOP-2843] - Add provisioner config yaml for ubuntu-16.04-aarch64
-    * [BIGTOP-2846] - Add DSL documentation for GIT-based builds
-    * [BIGTOP-2848] - Add provisioner docker yaml config file CentOS-7 AArch64
-    * [BIGTOP-2851] - [bigtop-puppet] Add Debian 8 AArch64 bigtop puppet
-    * [BIGTOP-2852] - [bigtop-slaves] Add Debian 8 AArch64 bigtop-slaves
-    * [BIGTOP-2853] - [sandbox] Add Debian 8 AArch64 sandbox
-    * [BIGTOP-2854] - Add provisioner docker yaml config file Debian 8 AArch64
-    * [BIGTOP-2856] - [sandbox] Add a condition in detect_repo()
-    * [BIGTOP-2857] - Add aarch64 support for fedora-25
-    * [BIGTOP-2858] - Add AArch64 support for Debian 9
-    * [BIGTOP-2867] - spark charm: make bigtop version configurable
-    * [BIGTOP-2869] - Bump solr to 6.6.0
-    * [BIGTOP-2872] - Replace aarch64 orgnanization with the more-specific arm64v8 organization in Official docker
-    * [BIGTOP-2874] - juju bundle refresh (august 2017)
-    * [BIGTOP-2875] - giraph charm: update metadata/readme
-    * [BIGTOP-2882] - Bump Hadoop and all dependencies to actual releases
-    * [BIGTOP-2889] - Remove HADOOP_HOME_WARN_SUPPRESS setting
-    * [BIGTOP-2910] - zeppelin charm: support bigtop upgrade
-    * [BIGTOP-2914] - GPDB 5.1.0 has been release. Let's bump that
-    * [BIGTOP-2916] - fix deprecated gradle syntax
-    * [BIGTOP-2918] - Update distributions for Bigtop-1.3
-    * [BIGTOP-2919] - Update maven to 3.5.2
-    * [BIGTOP-2920] - Cleanup puppet recipies: remove deprecated platforms
-    * [BIGTOP-2922] - Drop arch specific bigtop-* images
-    * [BIGTOP-2925] - Missing license header for .travis.yml
-    * [BIGTOP-2936] - Add RedHat default Oracle Java install location to detection script
-    * [BIGTOP-2959] - Add SPARK_DIST_CLASSPATH to include libraries for HDFS and YARN
-    * [BIGTOP-2974] - Add puppet code to deploy SparkR package
-    * [BIGTOP-2989] - Building Hadoop with a pre-downloaded Tomcat
-    * [BIGTOP-2992] - Building Pig with pre-arranged Forrest
-    * [BIGTOP-3002] - For Kafka, it should be possible to set the broker.id config and log.dirs config.
-    * [BIGTOP-3005] - Add zkpeer-relation-changed hook to zookeeper charm.
-    * [BIGTOP-3010] - juju bundle refresh (feb 2018)
-    * [BIGTOP-3011] - zookeeper: support autopurge.purgeInterval and autopurge.snapRetainCount.
-    * [BIGTOP-3031] - Auto find latest maven dist
-    * [BIGTOP-3038] - Add rollingupgrade option in hadoop-hdfs-namenode.svc
-    * [BIGTOP-3039] - Change git browse link to Github
-    * [BIGTOP-3047] - Add nagios monitoring to zookeeper charm.
-    * [BIGTOP-3055] - Bump GPDB to 5.10.0
-    * [BIGTOP-3058] - Bump flume to 1.8.0
-    * [BIGTOP-3059] - Bump Ambari to 2.6.1
-    * [BIGTOP-3060] - juju bundle refresh (august 2018)
-    * [BIGTOP-3062] - Bump flink to 1.4.2
-    * [BIGTOP-3064] - Bump HBase to 1.3.2
-    * [BIGTOP-3065] - Bump Hadoop to 2.8.4
-    * [BIGTOP-3066] - Bump tez to 0.9.1
-    * [BIGTOP-3067] - Bump Hive to 2.3.3
-    * [BIGTOP-3068] - Bump Hama to 0.7.1
-    * [BIGTOP-3069] - Bump crunch to 0.15.0
-    * [BIGTOP-3103] - Update download page to comply with Apache announcement requirements
-
-** Test
-    * [BIGTOP-2865] - HBase smoke test implementation
-
-
-** Task
-    * [BIGTOP-2739] - refresh juju bundles with latest charm revs
-    * [BIGTOP-2877] - Drop Kite packaging
-    * [BIGTOP-2884] - Upgrade gcc to support C++11 on CentOS 6
-    * [BIGTOP-2924] - Bring back 1.2.1 changes into master
-    * [BIGTOP-2945] - Define Bigtop 1.3 release BOM
-    * [BIGTOP-2969] - Bump up Spark version to 2.2.0
-    * [BIGTOP-2970] - Bump Zeppelin version to 0.7.3
-    * [BIGTOP-3006] - Add Jun He to team list
-    * [BIGTOP-3063] - [Umbrella] Efforts for 1.3.0 release
-    * [BIGTOP-3074] - Drop oozie packaging
-    * [BIGTOP-3075] - Drop pig packaging
-    * [BIGTOP-3106] - Update links in download page
-
-* Release 1.2.1 (2017-10-24)
-
-Release Notes - Bigtop - Version 1.2.1
-
-** Sub-task
-    * [BIGTOP-2165] - ignite-hadoop service doesn't start
-    * [BIGTOP-2396] - Create CI jobs for new Docker Provisioner
-    * [BIGTOP-2758] - [Sandbox] Support dryrun in build script
-    * [BIGTOP-2760] - [Sandbox] Upgrade to Bigtop 1.2
-    * [BIGTOP-2761] - Remove bigtop-deploy image build scripts
-    * [BIGTOP-2767] - Auto detect repo does not need OS code name since 1.2.0
-    * [BIGTOP-2769] - OS_TO_CODE_NAME has been removed in BIGTOP-2767
-    * [BIGTOP-2772] - [Sandbox] Add --dryrun mode into helper script and document
-    * [BIGTOP-2814] - Make provisioner config names be consistent with OS names in CI
-    * [BIGTOP-2815] - Puppet should be able to generate multiple repo files
-    * [BIGTOP-2816] - Fix provisioner config name for ubuntu-16.04
-
-** Bug
-    * [BIGTOP-2295] - Docker tests should consume current builds
-    * [BIGTOP-2679] - Streamline CI Jobs
-    * [BIGTOP-2716] - Solr build failed when OpenJDK8u_121 is used
-    * [BIGTOP-2729] - AMBARI-20686: Add ID to allow compilation with recent maven
-    * [BIGTOP-2737] - Spark charm doesn't handle HA or examples well
-    * [BIGTOP-2738] - spark-worker fails to start
-    * [BIGTOP-2740] - hbase 1.1.3 does not work on ppc64le
-    * [BIGTOP-2743] - hbase shell does not work on ppc64le
-    * [BIGTOP-2748] - Fix a puppet compatibility issue
-    * [BIGTOP-2749] - puppet: use jessie package on jessie, not trusty
-    * [BIGTOP-2750] - puppet: increase compatibility with future versions
-    * [BIGTOP-2751] - ambari: Storm 1.1.0-SNAPSHOT is no longer available, use released version
-    * [BIGTOP-2753] - Initial support for Debian-9
-    * [BIGTOP-2754] - Revert BIGTOP-2730: Upgrade Zookeeper to version 3.4.10
-    * [BIGTOP-2755] - Gradle needs cacerts file in place on fedora
-    * [BIGTOP-2756] - Get rid of Permission Denied when creating/destroying Docker Provisioner cluster
-    * [BIGTOP-2762] - Zeppelin installation failed due to JDK not installed
-    * [BIGTOP-2763] - Add /user/zeppelin directory for running Spark job on YARN in Zeppelin 
-    * [BIGTOP-2764] - deployment failure when roles include spark::common and spark::yarn*
-    * [BIGTOP-2765] - fix roles logic for spark/zeppelin charms
-    * [BIGTOP-2766] - [Puppet] Spark worker startup failed due to default master_url is yarn
-    * [BIGTOP-2771] - ambari: build failed due to using third-party ConcurrentHashMap instead of the standard one unnecessarily
-    * [BIGTOP-2774] - gradlew toolchain does not work on Ubuntu 16.04.2/Puppet 3.8.5
-    * [BIGTOP-2775] - Make open jdk 8 available on bigtop/puppet:debian-8
-    * [BIGTOP-2781] - [Provisioner] 127.0.0.1 does not successfully inserted after BIGTOP-2756
-    * [BIGTOP-2788] - Corrects Apex patch for version 3.6.0
-    * [BIGTOP-2789] - Ambari: installing ODPi mpack fails due to changing its file name
-    * [BIGTOP-2790] - Ambari: deploying cluster fails due to ambari-agent version mismatch
-    * [BIGTOP-2793] - BIGTOP-2790 broke Ambari build on rpm-based system
-    * [BIGTOP-2796] - Bigtop Zookeeper(3.4.6) package conflict with Ubuntu 16.04 Zookeeper(3.4.8)
-    * [BIGTOP-2797] - zeppelin charm external role handling
-    * [BIGTOP-2798] - Apex component has duplicate slf4j binding
-    * [BIGTOP-2799] - [Puppet] Flink deployment failure on all supported OS
-    * [BIGTOP-2800] - provisioner fails for kerberos on centos-7
-    * [BIGTOP-2801] - charm race condition when gathering metrics
-    * [BIGTOP-2803] - Minor issues in bigtop.bom
-    * [BIGTOP-2805] - ycsb: turn off autodetection of dependencies by rpm
-    * [BIGTOP-2806] - hue is not installable on debian, ubuntu
-    * [BIGTOP-2808] - Handle deletion of symlinks: update gradle
-    * [BIGTOP-2826] - Zeppelin RPM is broken: "missing" osgi package deps
-    * [BIGTOP-2829] - [iTest] build failed during Maven integration test phase
-    * [BIGTOP-2832] - Toolchain failed to install on Debian 8
-    * [BIGTOP-2850] - Not able to build bigtop/slaves image for OpenSuSE 42.1
-    * [BIGTOP-2870] - testHCFS should be using USER_NAME for expected output
-    * [BIGTOP-2871] - Make run_itest report Standard Error from tests to stderr
-    * [BIGTOP-2873] - A few cosmetic changes to run_itest.sh output
-    * [BIGTOP-2879] - BIGTOP-2749 breaks deployments on Debian-8
-    * [BIGTOP-2890] - Download page must not link to snapshots / nightly builds
-    * [BIGTOP-2891] - Download page must link to ASF mirrors
-    * [BIGTOP-2900] - Crunch build failed because of OS OOM killer on OpenJDK 1.8.0-144
-    * [BIGTOP-2901] - disable GPDB because of build failure on OpenSuSE 42.1
-    * [BIGTOP-2903] - update protobuf rpm links
-    * [BIGTOP-2908] - giraph charm: release audit failure
-
-** Improvement
-    * [BIGTOP-2355] - Update Mahout version to 0.13.0
-    * [BIGTOP-2677] - layer-spark: Improve sparkpi action output
-    * [BIGTOP-2730] - Bump zookeeper to 3.4.10
-    * [BIGTOP-2770] - Juju charm/bundle refresh
-    * [BIGTOP-2777] - make hbase charm more robust
-    * [BIGTOP-2778] - Delete PermSize / MaxPermSize options, no longer supported by Java 8
-    * [BIGTOP-2783] - new charm icons for zookeeper and zeppelin
-    * [BIGTOP-2795] - spark charm: fix sparkpi and rework start/stop logic
-    * [BIGTOP-2802] - Some packages don't create necessary groups on debian-based systems if the users corresponding to them already exist
-    * [BIGTOP-2807] - Upgrade Spark to 2.1.1
-    * [BIGTOP-2812] - Upgrade Zeppelin version to 0.7.2
-    * [BIGTOP-2819] - Polish the README.md for 1.2.1 release
-    * [BIGTOP-2821] - expose extra config options for spark
-    * [BIGTOP-2827] - juju bundle refresh (june 2017)
-    * [BIGTOP-2828] - Since BIGTOP-2775 JDK version has been specified by Bigtop Puppet
-
-** New Feature
-    * [BIGTOP-2253] - Rewrite Bigtop Docker Provisioner to use native solutions and support multi-host cluster deployment
-    * [BIGTOP-2779] - new hive charm
-    * [BIGTOP-2822] - spark charm: leverage puppet config, gpu enablement
-
-** Task
-    * [BIGTOP-2739] - refresh juju bundles with latest charm revs
-    * [BIGTOP-2747] - new charm revs for bigtop-1.2
-    * [BIGTOP-2776] - Bump Apache Apex version to 3.6.0
-    * [BIGTOP-2785] - Define Bigtop 1.2.1 release BOM
-    * [BIGTOP-2786] - Push Bigtop 1.2.1 Docker build slaves to Docker Hub
-    * [BIGTOP-2877] - Drop Kite packaging
-
-* Release 1.2.0 (2017-04-04)
-
-Release Notes - Bigtop - Version 1.2.0
-
-** Sub-task
-    * [BIGTOP-1406] - package Ambari in Bigtop
-    * [BIGTOP-1408] - create basic end-to-end tests for Ambari integration
-    * [BIGTOP-1409] - consider using ambari shell
-    * [BIGTOP-1624] - Add puppet recipes for deploying kafka
-    * [BIGTOP-2179] - Apache Tajo to bigtop: packaging as deb/rpm
-    * [BIGTOP-2180] - Apache Tajo to bigtop: make tests
-    * [BIGTOP-2224] - Let's bump groovy version for smokes to 2.4.10
-    * [BIGTOP-2254] - Replace the Docker orchestration tool from Vagrant to Docker Compose
-    * [BIGTOP-2285] - Add qfs rpm and debian packaging code
-    * [BIGTOP-2293] - Add puppet recipes for qfs components
-    * [BIGTOP-2312] - Add environment check
-    * [BIGTOP-2314] - Added deb and rpm package of Apache Apex to bigtop.
-    * [BIGTOP-2315] - Add smoke test for apex
-    * [BIGTOP-2316] - Add apex puppet recipes
-    * [BIGTOP-2317] - Add smoke tests for QFS
-    * [BIGTOP-2319] - Build initial smoke-tests distribution 
-    * [BIGTOP-2326] - Build slaves need to have the toolchain run again to install qfs dependencies
-    * [BIGTOP-2345] - Create Flink packaging
-    * [BIGTOP-2357] - Create puppet recipes
-    * [BIGTOP-2468] - Add Juju hadoop-processing bundle
-    * [BIGTOP-2469] - Add cloud-weather-report test plan
-    * [BIGTOP-2491] - Update provisioner/docker-hadoop.sh to bind with provisioner/utils
-    * [BIGTOP-2492] - Split flink debian packaging
-    * [BIGTOP-2505] - Support systemd containers, clean up hiera.yaml handling, fix exec (-e) flag
-    * [BIGTOP-2518] - Add node to the build slave configuration
-    * [BIGTOP-2526] - Bump flink version to 1.1.3
-    * [BIGTOP-2530] - Create Greenplum packages
-    * [BIGTOP-2531] - Create Greenplum deployment scripts
-    * [BIGTOP-2532] - Create Greenplum test
-    * [BIGTOP-2600] - Bump Groovy version to 2.4.10
-    * [BIGTOP-2601] - Bump Ignite to 1.9
-    * [BIGTOP-2613] - create bigtop/puppet docker image or fedora 25
-    * [BIGTOP-2614] - create bigtop/slave docker image for fedora 25
-    * [BIGTOP-2620] - Bump oozie version to 4.3.0
-    * [BIGTOP-2624] - Bump Phoenix version to 4.9.0
-    * [BIGTOP-2625] - update crunch for JAVA 8
-    * [BIGTOP-2626] - HBase build fails when JAVA 8 is used
-    * [BIGTOP-2627] - Kite 1.1.0 build fails when JAVA 8 is used
-    * [BIGTOP-2646] - Move Vagrant Provisioner from bigtop-deploy/vm to provisioner directory
-    * [BIGTOP-2647] - Clean up code under bigtop-deploy/vm after everything moved to provisioner directory
-    * [BIGTOP-2657] - Upgrade to Hue 3.11.0
-    * [BIGTOP-2668] - Polish and update Docker Provisioner configurations
-    * [BIGTOP-2672] - Update gradle wrapper for Docker Provisioner
-    * [BIGTOP-2700] - Adding tests to test the doas feature of httpfs
-    * [BIGTOP-2701] - Update hcfs tests so it can work in both cases where fs.trash.interval is disabled or enabled.
-    * [BIGTOP-2702] - Fix Sandbox creation script
-    * [BIGTOP-2704] - Include ODPi runtime tests option into the battery of smoke tests
-    * [BIGTOP-2705] - provide puppet deployment code for Ambari
-
-
-
-
-
-
-
-** Bug
-    * [BIGTOP-976] - package deployment tests are CDH specific: _has_ to be removed or fixed
-    * [BIGTOP-1533] - failed to load/initialize native-bzip2 library system-native
-    * [BIGTOP-2047] - detect JAVA_HOME for JDK8, remove obsolete detection code (JDK6/oracle JDK)
-    * [BIGTOP-2133] - Running BPSGenerator using fat-jar fails on Mac OS X
-    * [BIGTOP-2134] - Wrong package name in bigpetstore-mapreduce/arch.dot
-    * [BIGTOP-2135] - PigCSVCleaner fails due to the lack of the dependent jar
-    * [BIGTOP-2136] - A comment about parameter substitution in BPS_analytics.pig is slightly wrong
-    * [BIGTOP-2138] - deb: make bigtop-jsvc a wrapper package for jsvc package
-    * [BIGTOP-2220] - flume-agent.init incorrectly handles flume.conf
-    * [BIGTOP-2225] - Bump toolchain gradle to 2.10
-    * [BIGTOP-2229] - bigtop deploy to support centos-7
-    * [BIGTOP-2231] - build.gradle carries one too many sets of repositories
-    * [BIGTOP-2261] - adding bigtop/puppet:fedora-22 build support for ppc64le
-    * [BIGTOP-2274] - CLONE - rpm: need to make bigtop-jsvc a wrapper package for jsvc package
-    * [BIGTOP-2301] - Bigtop Homepage shows wrong url to CI
-    * [BIGTOP-2302] - Use apt instead of yum in setup-env-debian.sh
-    * [BIGTOP-2303] - Fix the indentation in docker-hadoop.sh
-    * [BIGTOP-2308] - Clean up build directory after successful build of package
-    * [BIGTOP-2318] - Release assembly needs to be updated
-    * [BIGTOP-2340] - BIGTOP-2319 is incomplete: the code for smoke-tests is missing
-    * [BIGTOP-2342] - Set yarn.log.server.url to point to JH server
-    * [BIGTOP-2346] - Do not use gradle delete() for sources and build directories
-    * [BIGTOP-2347] - Clean up build directory after successful build of package (2nd try)
-    * [BIGTOP-2350] - HCatalog WebHCat server's default file incorrectly specifies HCAT_PREFIX AND HADOOP_PREFIX
-    * [BIGTOP-2353] - fix qfs suse build
-    * [BIGTOP-2354] - qfs does not build on fedora
-    * [BIGTOP-2358] - Update the URL for Bigtop-trunk-packages job on README
-    * [BIGTOP-2359] - Add .DS_Store to .gitignore
-    * [BIGTOP-2362] - Enabling bigtop docker provisioner for ppc64le (ubuntu 15.04)
-    * [BIGTOP-2366] - Support https:// repositories for DEB systems
-    * [BIGTOP-2367] - QFS packages do not handle /var/run/qfs properly
-    * [BIGTOP-2372] - Puppet deploy README points to non-existing repo
-    * [BIGTOP-2373] - kinit path is not correctly configured in hue.ini template for CentOs by puppet module
-    * [BIGTOP-2374] - Toolchain needs clear warning for Puppet <3
-    * [BIGTOP-2376] - Update Tomcat and remove obsolete downloads 
-    * [BIGTOP-2377] - Update to Hadoop 2.7.2
-    * [BIGTOP-2378] - Do not use archive.apache.org for ant download
-    * [BIGTOP-2379] - update maven to 3.3.9 and refactor toolchain
-    * [BIGTOP-2380] - support opensuse-42.1 (leap)
-    * [BIGTOP-2382] - Parameterise and expose "default_hdfs_superuser" for hue.ini
-    * [BIGTOP-2399] - Maven artifacts are still on 2.6.0 version of Apache Hadoop
-    * [BIGTOP-2400] - Get groovy plugins used by Maven in sync with Bigtop groovy runtime
-    * [BIGTOP-2401] - a committer name is missing in the "Who we are" page
-    * [BIGTOP-2402] - yet another committer name is missing in the "Who we are" page
-    * [BIGTOP-2404] - puppetize.sh fails on Ubuntu 14.04 because of the lack of apt-get update
-    * [BIGTOP-2405] - Rollback BIGTOP-2049 as the original issue was fixed in the upstream
-    * [BIGTOP-2406] - init scripts do not work reliable on systemd systems
-    * [BIGTOP-2412] - arm dockerfile breaks RAT
-    * [BIGTOP-2413] - QFS README.md breaks RAT
-    * [BIGTOP-2419] - Fix symlinks on packaging Crunch rpm and deb
-    * [BIGTOP-2421] - Ignite RPM is broken: "missing" osgi package deps
-    * [BIGTOP-2423] - Fix hard-coded port number in Sqoop smokes
-    * [BIGTOP-2425] - crunch needs JDK8 javadoc
-    * [BIGTOP-2427] - jsvc build not recognizing ARM as valid platform
-    * [BIGTOP-2430] - Adding Dockerfile to build puppet image for ubuntu-16.04-ppc64le
-    * [BIGTOP-2431] - Adding Dockerfile to build slaves image for ubuntu-16.04-ppc64le
-    * [BIGTOP-2433] - mvn is missing in Bigtop Toolchain
-    * [BIGTOP-2434] - Change Spark test to unpack jar file and retrieve resources
-    * [BIGTOP-2439] - flink: fix adduser and register init.d scripts on DEB systemes
-    * [BIGTOP-2441] - spark smoke test doesn't work relying on yarn-client submittion
-    * [BIGTOP-2445] - Adding Dockerfile to build deploy image for ubuntu-16.04-ppc64le
-    * [BIGTOP-2448] - Adding Dockerfile to build puppet image for ubuntu-16.04-arm
-    * [BIGTOP-2449] - Adding Dockerfile to build slaves image for ubuntu-16.04-x86
-    * [BIGTOP-2450] - Adding Dockerfile to build slaves image for ubuntu-16.04-arm
-    * [BIGTOP-2460] -  docker-hadoop.sh fails when deploying on ubuntu 16.04 images
-    * [BIGTOP-2463] - Regression: bigtop-jsvc build fails on ppc64le architecture 
-    * [BIGTOP-2464] - Outdated descriptions in vagrant-puppet-vm/README.md
-    * [BIGTOP-2471] - qfs does not build on opensuse:42.1
-    * [BIGTOP-2472] - zeppelin puppet module needs to expose ZEPPELIN_WAR_TEMPDIR
-    * [BIGTOP-2473] - Mahout puppet script fails to find Package[hadoop]
-    * [BIGTOP-2474] - Improve handling of gradle home
-    * [BIGTOP-2487] - Alluxio /var/log/alluxio is not created 
-    * [BIGTOP-2488] - Redundant zookeeper/lib directory
-    * [BIGTOP-2494] - Use systemd within bigtop/deploy docker image for debian-8, centos-7, fix centos-7 provisioner
-    * [BIGTOP-2495] - hive-hbase package is broken
-    * [BIGTOP-2509] - Path error in bigtop-packages/src/common/solr/tomcat-deployment.sh
-    * [BIGTOP-2512] - Wrong help message in docker-hadoop.sh
-    * [BIGTOP-2513] - Some RAT exclude rules defined in build.gradle are not defined in pom.xml
-    * [BIGTOP-2523] - Incorrect cycle dependency during puppet deployment causes error at Debian 8
-    * [BIGTOP-2527] - Recreate all bigtop docker images and install "host" on debian
-    * [BIGTOP-2528] - provisioner fails to add short host name
-    * [BIGTOP-2529] - Vagrant Provisioner failed to start up daemons
-    * [BIGTOP-2533] - Use TLS for downloading sources
-    * [BIGTOP-2535] - frontend-maven-plugin fails on ppc64le
-    * [BIGTOP-2536] - tarball.destination is ignored when set
-    * [BIGTOP-2537] - Hive smoke test identifies as org.apache.bigtop.itest.hadoop.mapreduce
-    * [BIGTOP-2542] - Some files related to docker provisioner breaks RAT
-    * [BIGTOP-2543] - Some minor problems on provisioner/docker/docker-hadoop.sh
-    * [BIGTOP-2545] - Add ZOOKEEPER-2594 to bigtop
-    * [BIGTOP-2546] - Make debian/ubuntu toolchain install more resistant against failures
-    * [BIGTOP-2549] - "Expected" and "actual" values are mistakenly switched in some JUnit method invocation
-    * [BIGTOP-2551] - docker-hadoop.sh --create fails with "Error: Could not match |$index" message
-    * [BIGTOP-2553] - namenode ports are not configured for non-HA mode
-    * [BIGTOP-2556] - Building Hadoop fails because Tomcat 6.0.45 is not downloadable from Apache mirror
-    * [BIGTOP-2557] - BIGTOP-2536 broke DataFu, QFS and YCSB
-    * [BIGTOP-2558] - Add ppc64le and arm64 node to the build slave configuration
-    * [BIGTOP-2559] - Revert JIRA BIGTOP-2535
-    * [BIGTOP-2560] - Spark charm failing automated tests
-    * [BIGTOP-2563] - zeppelin build fails on ppc64le
-    * [BIGTOP-2564] - HBase build fails on Power
-    * [BIGTOP-2565] - upgrade Spark to version 1.6.2
-    * [BIGTOP-2566] - Specify KAFKA_VERSION in build command for Flume
-    * [BIGTOP-2569] - Spark 2.0
-    * [BIGTOP-2570] - ease hadoop charm debugging
-    * [BIGTOP-2582] - Adding Spark1 components for Spark 1.x family 
-    * [BIGTOP-2585] - Zookeeper service does not start if kerberos is disabled
-    * [BIGTOP-2587] - vagrant-puppet-vm fails with "Could not find class node_with_components"
-    * [BIGTOP-2588] - Spark 2.0.1 installation fails on DEB
-    * [BIGTOP-2589] - spark1 build break
-    * [BIGTOP-2590] - Flume build break on RPM
-    * [BIGTOP-2591] - Fix bug in BIGTOP-2569
-    * [BIGTOP-2593] - Build fail caused by tomcat 6.0.45 version
-    * [BIGTOP-2594] - Workaround to fix Hive Build, by using SPARK1 Version
-    * [BIGTOP-2596] - Phoenix build fails
-    * [BIGTOP-2597] - crunch build failed
-    * [BIGTOP-2599] - Zeppelin 0.6.2 build fails on Power
-    * [BIGTOP-2602] - ignite-hadoop build break
-    * [BIGTOP-2603] - NN/RM charm should include a spark user/group
-    * [BIGTOP-2604] - Build flink-dist with proper shading
-    * [BIGTOP-2605] - Addendum for BIGTOP-2514
-    * [BIGTOP-2606] - spark1 build fails when RPM is selected
-    * [BIGTOP-2608] - upgrade Hadoop to 2.7.3
-    * [BIGTOP-2609] - upgrade Kafka to version 0.10.0.0
-    * [BIGTOP-2610] - upgrade mahout to version 0.12.2
-    * [BIGTOP-2612] - Need bigtop/puppet image for Fedora-25
-    * [BIGTOP-2618] - bigtop-jsvc build fails with OpenJDK-1.8
-    * [BIGTOP-2621] - JAVA_HOME is not set on Fedora-25+JAVA-1.8 for x86 
-    * [BIGTOP-2623] - Zeppelin 0.6.2 build fails on all platforms
-    * [BIGTOP-2628] - sqoop2 build fails when JAVA 8 is used
-    * [BIGTOP-2632] - bigtop 1.1.0 sqoop2 wrote rpm error
-    * [BIGTOP-2635] - ubuntu on ppc64le is missing a apt-get update
-    * [BIGTOP-2636] - Fix giraph after upgrade to Hadoop-2.7.3
-    * [BIGTOP-2637] - Fix flume because of kafka 0.10 update
-    * [BIGTOP-2638] - Update to kafka 0.10.1.0
-    * [BIGTOP-2640] - Incomplete patch BIGTOP-2635 - Bigtop stack build fails on Ubuntu 16.04-ppc64le
-    * [BIGTOP-2643] - Force giraph to compile by removing findbugs from all pom.xml
-    * [BIGTOP-2644] - CI compiles random greenplum source
-    * [BIGTOP-2648] - Gradle 2.7 SSL Problems
-    * [BIGTOP-2649] - Default Java8 blocks default debian jdk
-    * [BIGTOP-2650] - Docker build should pull up to date packages
-    * [BIGTOP-2651] - Delete obsolete docker images and build files.
-    * [BIGTOP-2652] - tajo rpm packaging seriously broken
-    * [BIGTOP-2653] - upgrading to Spark 2.1
-    * [BIGTOP-2654] - spark 2.1 binaries need either SPARK_HOME or non existing find-spark-home exe
-    * [BIGTOP-2655] - Help spark find the native hadoop libs
-    * [BIGTOP-2656] - regression - spark 2.1 upgrade breaks rpm packaging 
-    * [BIGTOP-2661] - Docker deploy images should pull upstream images
-    * [BIGTOP-2662] - provisioner: Add support for JDK8 on debian
-    * [BIGTOP-2663] - puppet hadoop module: Consolidate memory resource settings 
-    * [BIGTOP-2664] - create provisioner test for trunk packages with jdk8
-    * [BIGTOP-2665] - Fix hue for opensuse
-    * [BIGTOP-2669] - Create bigtop-1.2 repository 
-    * [BIGTOP-2673] - Need to back port HADOOP-11628 for JDK8
-    * [BIGTOP-2674] - docker-hadoop option -C does not work correctly any more
-    * [BIGTOP-2675] - [Puppet] Kafka can only be deployed on master node
-    * [BIGTOP-2676] - Smoke test fails on Spark 2
-    * [BIGTOP-2678] - Add hive-hbase role to install hive-hbase rpm
-    * [BIGTOP-2689] - Upgrade Zeppelin to version 0.7
-    * [BIGTOP-2690] - gradlew toolchain fails trying to download Ant 1.9.8
-    * [BIGTOP-2692] - Flume init script should load only '.conf' extension files
-    * [BIGTOP-2693] - Update readme for build and configuring git repo for packages
-    * [BIGTOP-2695] - arch.dot for bigpetstore-spark is outdated
-    * [BIGTOP-2699] - Zeppelin CI build is broken
-    * [BIGTOP-2707] - localhost is missing in docker provisioner instances
-    * [BIGTOP-2708] - puppet code always installs hdfs by default
-    * [BIGTOP-2710] - qfs debian build is breaking because it is confused about Maven home
-    * [BIGTOP-2711] - fix rat problems
-    * [BIGTOP-2715] - Tiny typo in the README.md
-    * [BIGTOP-2718] - Mark tez package as arch dependent
-    * [BIGTOP-2719] - Mark zeppelin package as arch dependent
-    * [BIGTOP-2721] - libsnappy reference path is not correct for hadoop/ubuntu16 build
-    * [BIGTOP-2724] - putting a final nail in Java's 7 coffin
-    * [BIGTOP-2725] - a few usability improvements to run_itest.sh
-    * [BIGTOP-2726] - Bump ambari version to 2.5
-    * [BIGTOP-2727] - fix deployment tasks
-
-
-
-
-
-** Improvement
-    * [BIGTOP-1608] - Create Unified testing solution: Smoke-Tests and Test-Artifacts
-    * [BIGTOP-1843] - Upgrade Sqoop to 1.4.6
-    * [BIGTOP-2077] - Bump HBase version to 1.1
-    * [BIGTOP-2118] - Update HBase to 0.98.17
-    * [BIGTOP-2306] - Add support for specifying git repository access credentials
-    * [BIGTOP-2309] - Update submodules after git clone
-    * [BIGTOP-2327] - Bump up Phoenix to 4.7.0
-    * [BIGTOP-2330] - Bump Tachyon version to 0.8.2
-    * [BIGTOP-2332] - Upgrade Tajo version to 0.11.1
-    * [BIGTOP-2333] - Make Apache Pig work with HBase 1.1 on Hadoop 2
-    * [BIGTOP-2337] - Fix script to make deb in Tajo
-    * [BIGTOP-2351] - Vagrant VM is missing /user/vagrant HDFS directory
-    * [BIGTOP-2352] - Packaged Vagrant box has incorrect host info on restart
-    * [BIGTOP-2370] - Upgrade DataFu version to 1.3.0
-    * [BIGTOP-2381] - Making Zookeeper hosts templatize in puppet module of hue
-    * [BIGTOP-2384] - Refactor toolchain code to extract packages version out of every action
-    * [BIGTOP-2385] - Reformat bigtop_toolchain/manifests/packages.pp to ease maintenance
-    * [BIGTOP-2393] - Add ARM64 support for build environment
-    * [BIGTOP-2395] - Create separate page to point "Download now" link to
-    * [BIGTOP-2403] - Remove affiliation column in the team list
-    * [BIGTOP-2407] - Making "force_username_lowercase" variable templatize in puppet module of hue
-    * [BIGTOP-2408] - group_filter variable value is not templatize in puppet module of hue
-    * [BIGTOP-2409] - nt_domain configuration option for hue to connect to Active Directory
-    * [BIGTOP-2410] - Making "ldap_username_pattern" variable templatize in puppet module of hue
-    * [BIGTOP-2411] - Add OS-neutral wrappers to build all native packages and repositories 
-    * [BIGTOP-2416] - Upgrade Crunch to 0.13.0
-    * [BIGTOP-2429] - Add Solr to gradle smokes
-    * [BIGTOP-2443] - inject unzip method to ZipInputStream, accepting regex for include
-    * [BIGTOP-2455] - support pre-installed java environment
-    * [BIGTOP-2458] - Add option to disable IP hostname checking for DataNode registration
-    * [BIGTOP-2459] - Add option to disable vmem check for NodeManager
-    * [BIGTOP-2466] - HBase puppet scripts co-deploy ZK and Datanode along with HBase
-    * [BIGTOP-2478] - Zookeeper does not have any smoke tests
-    * [BIGTOP-2479] - Update qfs maintainer to kstinson
-    * [BIGTOP-2490] - Spark in HA when Zookeeper is available 
-    * [BIGTOP-2504] - Kafka should be able to bind to something other than 0.0.0.0/the default interface
-    * [BIGTOP-2506] - Zookeeper: non default interface for client
-    * [BIGTOP-2507] - Make buildstamp configurable by a bom file
-    * [BIGTOP-2516] - Add Zeppelin Charm
-    * [BIGTOP-2521] - Problem with deprecated methods in puppet code with puppet option parser=future
-    * [BIGTOP-2522] - Add spark processing juju bundle
-    * [BIGTOP-2547] - Don't render ip-hostname-check in hdfs-site.xml if default
-    * [BIGTOP-2548] - Refresh charms for Juju 2.0 and Xenial
-    * [BIGTOP-2554] - expose bind-host options in hieradata
-    * [BIGTOP-2555] - hadoop charms should use bind-host overrides
-    * [BIGTOP-2561] - add juju bundle for hadoop-spark
-    * [BIGTOP-2571] - cwr driven updates to the zeppelin juju charm
-    * [BIGTOP-2575] - zk charm test updates
-    * [BIGTOP-2576] - For small clusters it is useful to turn replace-datanode-on-failure off
-    * [BIGTOP-2577] - kafka charm test updates
-    * [BIGTOP-2578] - Puppet needs to be updated for Hive Server2
-    * [BIGTOP-2579] - Puppet needs to be updated for Hive Metastore service
-    * [BIGTOP-2611] - Adding Fedora-25 with Java 1.8 support
-    * [BIGTOP-2616] - refresh juju hadoop-processing bundle
-    * [BIGTOP-2617] - refresh juju spark-processing bundle
-    * [BIGTOP-2682] - update juju bundles with recent charm revs
-    * [BIGTOP-2688] - Add flag for provisioning of Yum Repo
-    * [BIGTOP-2694] - Added Hernan Vivani (hvivani) as elasticsearch maintainer
-    * [BIGTOP-2696] - Build out VM for sandboxes
-    * [BIGTOP-2697] - Back port HADOOP-12366 into Bigtop
-    * [BIGTOP-2712] - Juju CI driven updates
-    * [BIGTOP-2714] - Update Groovy in the toolchain to 2.4.10
-
-
-
-
-** New Feature
-    * [BIGTOP-1641] - Add packaging for Apache Tajo
-    * [BIGTOP-2282] - Define Bigtop 1.2 release BOM
-    * [BIGTOP-2296] - Provide a way to build Docker container with functional stack
-    * [BIGTOP-2339] - add centos-7 to the provisioner matrix
-    * [BIGTOP-2435] - Add Juju charms for hadoop component
-    * [BIGTOP-2451] - Adding Ubuntu 16.04 support to Bigtop
-    * [BIGTOP-2452] - Adding Dockerfile to build deploy image for ubuntu-16.04-x86
-    * [BIGTOP-2476] - Add Zookeeper Charm
-    * [BIGTOP-2477] - Add Juju charm for spark component
-    * [BIGTOP-2481] - Add HBase Charm
-    * [BIGTOP-2482] - Adding Pig charm
-    * [BIGTOP-2483] - Add Mahout Charm
-    * [BIGTOP-2486] - Add Kafka Charm
-    * [BIGTOP-2524] - Add Greenplum component
-    * [BIGTOP-2615] - Provide a tool to build pseudo cluster docker images
-    * [BIGTOP-2660] - Add Giraph Charm
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-** Task
-    * [BIGTOP-2289] - Set master version to 1.2.0-SNAPSHOT
-    * [BIGTOP-2334] - Update latest release link on the website
-    * [BIGTOP-2335] - ci link should use https:// authority
-    * [BIGTOP-2368] - OpenStack config bigtop repo-url changes.
-    * [BIGTOP-2371] - Add Oozie to gradle smoke tests
-    * [BIGTOP-2414] - Rename Tachyon to Alluxio
-    * [BIGTOP-2415] - readme spelling error
-    * [BIGTOP-2456] - Update Apache Apex version to 3.4.0
-    * [BIGTOP-2497] - Upgrade Apache Phoenix to 4.8.1
-    * [BIGTOP-2500] - Remove hadoop jars from TEZ_HOME/lib and create appropriate symlinks
-    * [BIGTOP-2525] - committer test:  add myself to team-list.xml
-    * [BIGTOP-2538] - Bump version of Solr to 4.10.4
-    * [BIGTOP-2550] - Update juju hadoop bundle for Juju 2.0 and Xenial
-    * [BIGTOP-2567] - Upgrade Flume version to 1.7.0
-    * [BIGTOP-2568] - Upgrade Kafka version to 0.9.0.1
-    * [BIGTOP-2592] - Upgrade to Spark 2.0.2
-    * [BIGTOP-2622] - Add Kengo Seki to team list
-    * [BIGTOP-2629] - Add Jonathan Kelly to team list
-    * [BIGTOP-2670] - Bump up Apache Apex version to 3.5.0
-    * [BIGTOP-2687] - Remove /usr/share/java/*.jar from Sqoop2 catalina.properties
-    * [BIGTOP-2703] - refresh juju charms/bundles with metric and CI support
-    * [BIGTOP-2722] - Remove workaround allowing build by root in Tez
-
-
-
-** Test
-    * [BIGTOP-2534] - Flume tests fail with java.lang.NoClassDefFoundError: org/apache/commons/io/Charsets
-
-
-** Wish
-    * [BIGTOP-2680] - Update kafka to 0.10.1.1
-
-* Release 1.1.0 (2016-01-30)
-
-Release Notes - Bigtop - Version 1.1.0
-
-** Sub-task
-    * [BIGTOP-1494] - Introduce Groovy DSL to replace bigtop.mk
-    * [BIGTOP-1499] - released source code  is not same with source code in branch
-    * [BIGTOP-1898] - Dockerfiles to build bigtop/puppet docker images for 1.0 release
-    * [BIGTOP-1899] - Migrate CI master to EC2 instance donated by Amazon EMR team
-    * [BIGTOP-1901] - Provide bigtop/deploy images on dockerhub for CI and users to consume docker provisioner
-    * [BIGTOP-1947] - Fix RAT plugin configuration to be able to RAT-validate all published artifacts
-    * [BIGTOP-1959] - Avoid running top-level assembly while doing deploy for subprojects
-    * [BIGTOP-1965] - Remove the link to 0.8.0 from the website
-    * [BIGTOP-1973] - Add new tests for HDFS Balancer functionality
-    * [BIGTOP-1975] - Smoke tests for Spark SQL
-    * [BIGTOP-1981] - Add new tests for test, text, count commands
-    * [BIGTOP-1983] - Move BigPetStore data generator to bigtop-data-generators
-    * [BIGTOP-1984] - Extract samplers library from BigPetStore data generator
-    * [BIGTOP-1985] - Extract name generator from BigPetStore data generator
-    * [BIGTOP-1986] - Extract location dataset from BigPetStore data generator
-    * [BIGTOP-1990] - Add gradle multi-project build files for bigtop-data-generators
-    * [BIGTOP-1991] - Add BigTop Weatherman
-    * [BIGTOP-1992] - Fix RAT check errors resulting from BPS data generator move
-    * [BIGTOP-1995] - Update BigPetStore to use external locations  data
-    * [BIGTOP-2000] - Add BigTop Data Generators to maintainers file
-    * [BIGTOP-2009] - Add new tests for chgrp, cp, ls, mv, du, put, get, mkdir, stat and touchz
-    * [BIGTOP-2025] - Make BOM to be a directional graph
-    * [BIGTOP-2033] - Build order of the stack is broken 
-    * [BIGTOP-2051] - Get rid of hair-brain environment vars left after make-based build
-    * [BIGTOP-2055] - Refactor packages.gradle to get rid of excessive data containers; make code cleaner
-    * [BIGTOP-2087] - The 1.0 release package artifacts on Bigtop jenkins is broken due to CI transition
-    * [BIGTOP-2096] - ignite-hadoop service should restart upon changes in the package or configs
-    * [BIGTOP-2097] - cleaning up the ignite-service
-    * [BIGTOP-2104] - Packages upgrade to Spark 1.5.1
-    * [BIGTOP-2105] - Puppet recipes improvements after Spark is bumped to 1.5.1
-    * [BIGTOP-2107] - Ignite package still shows website at the incubator
-    * [BIGTOP-2113] - Spark master doesn&#39;t bind to the host: ignores SPARK_MASTER_IP
-    * [BIGTOP-2122] - Add zeppelin packages
-    * [BIGTOP-2123] - Add zeppelin Puppet recipes
-    * [BIGTOP-2149] - Zeppelin 0.5.5 has been officially released. Change the source ref
-    * [BIGTOP-2154] - spark-shell doesn&#39;t start anymore without Hive libs in the classpath
-    * [BIGTOP-2166] - Zeppelin shouldn&#39;t be build against constant version of Ignite
-    * [BIGTOP-2167] - Zeppelin interpreter list doesn&#39;t include Ignite
-    * [BIGTOP-2169] - Zeppelin has more upstream dependencies
-    * [BIGTOP-2174] - Bump up ignite-hadoop version to the latest 1.5.0-b1
-    * [BIGTOP-2185] - Exclude Zeppelin interpreter.json from RAT check
-    * [BIGTOP-2219] - Comb the smoke-tests to make code base easier for refactoring
-    * [BIGTOP-2226] - Remove gradle wrapper folder in smoke-tests
-    * [BIGTOP-2234] - TestBlockRecovery incorrectly fails on a single node cluster
-    * [BIGTOP-2235] - Allow smoke-tests to use filename regexps instead of explicit listings
-    * [BIGTOP-2245] - TestFuseHCFS might fall on &#39;+&#39; op. involving String and GString; class name is wrong
-    * [BIGTOP-2267] - Zeppelin 0.5.6 is out; let&#39;s bump it in our stack
-    * [BIGTOP-2271] - Update maven rat config to match one from gradle
-    * [BIGTOP-2277] - release assembly needs to include bigtop_toolchain
-    * [BIGTOP-2278] - Wrap separate maven release steps into convenient gradle task
-
-** Bug
-    * [BIGTOP-1022] - Giraph build script should also specify ZooKeeper, HBase, and Hive versions
-    * [BIGTOP-1318] - Consider not forking compute-classpath.sh for spark
-    * [BIGTOP-1344] - spec files assume RPMs being built on Red Hat
-    * [BIGTOP-1352] - Refactor puppet code for installing JDK7
-    * [BIGTOP-1658] - puppet recipe updates for latest spark (1.3+ )
-    * [BIGTOP-1690] - Puppet should automatically create data directories
-    * [BIGTOP-1789] - Spark 1.3.0 incompatible with Hive 1.1.0
-    * [BIGTOP-1805] - Upgrade Hadoop to 2.7 if released
-    * [BIGTOP-1838] - Pig build does not publish artifacts of build
-    * [BIGTOP-1877] - Upgrade Crunch to 0.12.0
-    * [BIGTOP-1886] - Kafka server can not create a log-cleaner.log file
-    * [BIGTOP-1892] - Current required version of gradle 2.4 is not used everywhere
-    * [BIGTOP-1893] - Compilation of hadoop-yarn-client failed 
-    * [BIGTOP-1894] - Snappy development packages are missing from bigtop_toolchain
-    * [BIGTOP-1896] - bigtop_toolchain broken by ant update
-    * [BIGTOP-1902] - typo in bigtop-deploy/vm/vagrant-puppet-vm/vagrantconfig.yaml
-    * [BIGTOP-1905] - Update Hue build for the upcoming 3.9 release
-    * [BIGTOP-1909] - Include compiled .mo files for HUE i18n
-    * [BIGTOP-1913] - Update hive to 1.2.1
-    * [BIGTOP-1916] - Update Website for 1.0
-    * [BIGTOP-1917] - Simplify gradle creating apt/yum repositories for better CI 
-    * [BIGTOP-1936] - Provide JDK8 for Bigtop
-    * [BIGTOP-1937] - redhat-lsb is required by kafka daemon
-    * [BIGTOP-1938] - kafka packages /usr/bin on RPM 
-    * [BIGTOP-1940] - Consider removing tests expecting &#39;sudo&#39; from skip-list
-    * [BIGTOP-1946] - Missing ASL header in some of iTest files
-    * [BIGTOP-1948] - Need to upgrade groovy-eclipse-batch as it keeps pulling from non-existing repo
-    * [BIGTOP-1949] - Sqoop 1.4.5 artifacts aren&#39;t getting resolved in the release...
-    * [BIGTOP-1950] - Upgrade maven-assembly plugin: StackOverFlowException is thrown
-    * [BIGTOP-1951] - Fix licenses in the source files
-    * [BIGTOP-1954] - Change the component name in the MAINTAINERS.txt
-    * [BIGTOP-1956] - Multi RS HBase requires unique hbase.tmp.dir to be set for each RS on a node
-    * [BIGTOP-1958] - Upgrade default repositories and docker images to 1.0
-    * [BIGTOP-1960] - The smoke-test wrapper in bigtop-deploy can only be used in redhat series of Linux
-    * [BIGTOP-1963] - Upgrade Mahout to 0.11.0
-    * [BIGTOP-1966] - site&#39;s index.xml is malformatted
-    * [BIGTOP-1967] - Update the front-page of the website with new CI hostname
-    * [BIGTOP-1987] - Recover resources/kmeans_data.txt for Spark smokes
-    * [BIGTOP-1996] - Dockerfiles for bigtop-slaves
-    * [BIGTOP-1999] - website link to the release bits is broken and points to the top-level mirror&#39;s directory
-    * [BIGTOP-2003] - Bigtop puppet fails to deploy on Ubuntu due to a hiera 1.3.0 bug
-    * [BIGTOP-2004] - Download task fails with Gradle 2.6
-    * [BIGTOP-2007] - bigtop.mk version of the stack needs to be bumped to 1.1.0-SNAPSHOT
-    * [BIGTOP-2008] - build.gradle has out-of-date version
-    * [BIGTOP-2014] - [VM provisioner] Missing FQDN on Ubuntu causes puppet deployment malfunction
-    * [BIGTOP-2016] - tez does not build on opensuse
-    * [BIGTOP-2017] - Rebase bigtop-slaves on bigtop-puppet
-    * [BIGTOP-2019] - BigPetStore Spark isn&#39;t compiling due to changes in SQL API
-    * [BIGTOP-2026] - Phoenix build defines HBASE_VERSION in two different places.
-    * [BIGTOP-2027] - Bump gradle version, the wrapper to 2.7
-    * [BIGTOP-2028] - Enhance puppet config of zookeeper to support kerberized clients
-    * [BIGTOP-2032] - Tez install does not set up tez jars on hdfs causing Pig to fail
-    * [BIGTOP-2037] - BIGTOP-1746 Added Files Without Apache License Headers
-    * [BIGTOP-2038] - Pig destination name incorrect
-    * [BIGTOP-2039] - Solr download URL is incorrect
-    * [BIGTOP-2040] - Mahout can not be build with Maven 3.0.5 - build containers need to be upgraded
-    * [BIGTOP-2041] - Spark pkg name is incorrect
-    * [BIGTOP-2042] - Tachyon name is incorrect
-    * [BIGTOP-2043] - Kafka source incorrect
-    * [BIGTOP-2044] - Unnecessary printout has been introduced by BIGTOP-1494
-    * [BIGTOP-2046] - puppet module search path
-    * [BIGTOP-2050] - Description of clean tasks have null entries
-    * [BIGTOP-2052] - Remove obsolete environment variables
-    * [BIGTOP-2053] - After rebasing on Hadoop 2.7.1 yarn module should be returned to ignite-hadoop build
-    * [BIGTOP-2054] - Update Pig
-    * [BIGTOP-2056] - Remove top-level check-env.sh
-    * [BIGTOP-2057] - null check doesn&#39;t safeguard against non-existing values in the BOM config
-    * [BIGTOP-2059] - Bump Ignite to 1.4
-    * [BIGTOP-2061] - toolchain is failing because add-apt-repository command isn&#39;t available off-hand
-    * [BIGTOP-2062] - cluster.yaml declares undefined vars; apply is broken
-    * [BIGTOP-2066] - init-hdfs.sh is broken by recent hadoop update
-    * [BIGTOP-2068] - Cannot Build Bigtop-Utils packages
-    * [BIGTOP-2071] - Gstring.empty doesn&#39;t exist
-    * [BIGTOP-2074] - spark-worker doesn&#39;t start during deploy from master
-    * [BIGTOP-2082] - Remove x86 Assembler Code from zookeeper
-    * [BIGTOP-2083] - smoke-tests are still on hadoop 2.6.0
-    * [BIGTOP-2084] - rename all puppet modules to have an underscore rather a dash 
-    * [BIGTOP-2085] - gradle toolchain should install necessary puppet modules
-    * [BIGTOP-2086] - Install essential puppet modules along with puppet itself
-    * [BIGTOP-2088] - Support protobuf installation for OPENPOWER
-    * [BIGTOP-2089] - Fix bigtop.sh generation
-    * [BIGTOP-2090] - Remove left-over junk after BIGTOP-2053
-    * [BIGTOP-2094] - ignite-hadoop fails to deploy after BIGTOP-2084
-    * [BIGTOP-2098] - Update bigtop_deploy to current state of puppetize.sh
-    * [BIGTOP-2114] - hive is broken after BIGTOP-2104
-    * [BIGTOP-2115] - phoenix is broken after BIGTOP-2104
-    * [BIGTOP-2120] - opensuse: Hue packages are broken after BIGTOP-1905
-    * [BIGTOP-2121] - Missing &#39;:&#39; in bigtop::roles description
-    * [BIGTOP-2126] - Fix default repo locations in the deployment site.pp
-    * [BIGTOP-2127] - opensuse: fix kerberos python module
-    * [BIGTOP-2128] - ignite-hadoop man page still refers to the Incubator
-    * [BIGTOP-2131] - [Docker] bigtop slaves images should be built without cache
-    * [BIGTOP-2139] - crunch compile fails with Insufficient memory on POWER
-    * [BIGTOP-2140] - hbase compile fails with Insufficient memory on POWER
-    * [BIGTOP-2144] - Update default repos in Bigtop Provisioner
-    * [BIGTOP-2147] - Minor nits in bigpetstore-spark/README.md
-    * [BIGTOP-2148] - generator.SparkDriver outputs empty data in cluster mode
-    * [BIGTOP-2150] - Reversing directional build behavior 
-    * [BIGTOP-2152] - Fix conflict with libressl-devel from mysql-devel
-    * [BIGTOP-2155] - Fix Hue 3.9.0 build failed on Fedora
-    * [BIGTOP-2156] - Fix Sqoop 1.4.5 build failed on OpenSuSE
-    * [BIGTOP-2159] - unable to build RPM for zeppelin
-    * [BIGTOP-2162] - phoenix-core-*-tests.jar should be located at $PHOENIX_HOME
-    * [BIGTOP-2168] - An erroneous typo in FailureVars#loadProps method
-    * [BIGTOP-2171] - shared gradle directory on slave containers should be writable for non-root users
-    * [BIGTOP-2172] - get rid of gradlew inside of subprojects
-    * [BIGTOP-2173] - smoke-tests need to pass-through system properties
-    * [BIGTOP-2175] - BIGTOP_HOME should be either asserted or replaced with setup logic
-    * [BIGTOP-2176] - Package version of ignite-hadoop has illegal &quot;-&quot; character
-    * [BIGTOP-2177] - Build dependecies list has wrong component name for ignite
-    * [BIGTOP-2181] - Setting Ignite version to use &#39;.&#39; broke Zeppelin build.
-    * [BIGTOP-2187] - toolchain creates user with different ids on different systems
-    * [BIGTOP-2188] - bigtop/puppet:ubuntu-14.04 image does not support ppc64le 
-    * [BIGTOP-2189] - bigtop/slave:ubuntu-14.04 image does not support ppc64le 
-    * [BIGTOP-2190] - libprotobuf8 2.5.0 installation fails on ubuntu 14.04 for ppc64le
-    * [BIGTOP-2191] - openjdk-7 is missing in bigtop/slave:ubuntu-14.04-ppc64le docker image 
-    * [BIGTOP-2196] - Docker configuration for ubuntu-15.04-ppc64le
-    * [BIGTOP-2198] - adding bigtop/slaves:ubuntu-15.04 support for ppc64le
-    * [BIGTOP-2199] - Add change introduced by BIGTOP-2171
-    * [BIGTOP-2200] - Fix download path for protobuf ubuntu-15.04 on ppc64le
-    * [BIGTOP-2201] - Fix BIGTOP-2200 again
-    * [BIGTOP-2215] - Enable autopurge in zookeeper
-    * [BIGTOP-2221] - rpmbuild is missing in bigtop/slaves:trunk-opensuse-13.2 docker image
-    * [BIGTOP-2222] - Hadoop do-component-build should pull in and use BOM
-    * [BIGTOP-2223] - .dockerignore doesn&#39;t have ASL header
-    * [BIGTOP-2236] - GRADLE_USER_HOME should be set for bigtop/slaves images only
-    * [BIGTOP-2237] - Nullify the standard output when generating gradle cache 
-    * [BIGTOP-2238] - Provisioner should propagate hdfs ssh keys for testing
-    * [BIGTOP-2240] - add -XX:PermSize=1024m -XX:MaxPermSize=1024m to build environment
-    * [BIGTOP-2242] - Running ./gradlew should only require JAVA_HOME for test task
-    * [BIGTOP-2243] - :itest-common:test is broken without JAVA_HOME being set
-    * [BIGTOP-2244] - CI provisioner tests are failing after BIGTOP-2227 change
-    * [BIGTOP-2246] - Add smoke tests for HCFS
-    * [BIGTOP-2247] - Expand HDFS smoke test suite
-    * [BIGTOP-2252] - provisional hdfs ssh keys couldn&#39;t be found during deployment
-    * [BIGTOP-2255] - bigtop/deploy:ubuntu-15.04 image does not support ppc64le
-    * [BIGTOP-2256] - Promote return codes from docker to the CI while building images
-    * [BIGTOP-2272] - moved hdfs ssh key are failing rat check
-    * [BIGTOP-2275] - Update configuration files for jsvc
-    * [BIGTOP-2276] - Zeppelin added war_tempdir location, which needs to be set
-    * [BIGTOP-2281] - Add HIVE-12875 to Bigtop
-    * [BIGTOP-2288] - Hadoop time-server fails to start 
-    * [BIGTOP-2299] - test resources are breaking rat
-
-** Improvement
-    * [BIGTOP-1126] - Add Hama to Bigtop
-    * [BIGTOP-1131] - Update Build Requirements on our web pages
-    * [BIGTOP-1309] - Gradle environment overhaul
-    * [BIGTOP-1443] - Update front page of website
-    * [BIGTOP-1809] - Remove gridgain-hadoop component once ignite-hadoop gets added
-    * [BIGTOP-1888] - Upgrade Flume to 1.6.0
-    * [BIGTOP-1908] - Move bigtop-deploy&#39;s dockerfiles into a centralized docker place
-    * [BIGTOP-1910] - Adjust the dependency for hue-beeswax
-    * [BIGTOP-1914] - improve puppet README.md file
-    * [BIGTOP-1915] - Upgrade Oozie to 4.2.0
-    * [BIGTOP-1920] - Include YCSB in Bigtop
-    * [BIGTOP-1921] - Puppet recipe for YCSB
-    * [BIGTOP-1923] - Bump Ignite to 1.2 with major fixes
-    * [BIGTOP-1941] - Upgrade Phoenix to 4.4.0
-    * [BIGTOP-1942] - Upgrade Phoenix to 4.6.0
-    * [BIGTOP-1943] - Upgrade SCALA version to 2.10.4
-    * [BIGTOP-1944] - Upgrade Spark version to 1.5.1
-    * [BIGTOP-1955] - Upgrade Ignite Hadoop component version from 1.2.0 to 1.3.0
-    * [BIGTOP-1964] - Upgrade Tez version to 0.6.2
-    * [BIGTOP-1970] - Ignite IGFS now fully supports multitenancy: deployment should configure it with HDFS backing
-    * [BIGTOP-1971] - Support Spark SQL CLI with Apache Hive out of the box
-    * [BIGTOP-1974] - Revise SPARK_HOME/conf/spark-env.sh
-    * [BIGTOP-1993] - Bump groovy to 2.4.4 in the development toolchain
-    * [BIGTOP-1997] - Bump bigtop-groovy runtime to 2.4.4
-    * [BIGTOP-1998] - Toolchain installer needs to switch from CGI to Lua mirror selection
-    * [BIGTOP-2005] - Remove SCALA_HOME requirement
-    * [BIGTOP-2018] - Create a puppetizing script
-    * [BIGTOP-2020] - Add Gradle RAT plugin to the top-level project
-    * [BIGTOP-2063] - Provide default config to deploy hive on top of Ignite
-    * [BIGTOP-2065] - Update deployment README.md to reflect on better deployment experience
-    * [BIGTOP-2080] - Investigate removing Scala from the toolchain
-    * [BIGTOP-2081] - implement a nexus docker container for CI
-    * [BIGTOP-2091] - Build ignite-hadoop assembly with specific version of Spark
-    * [BIGTOP-2102] - Upgrade YCSB to 0.4.0
-    * [BIGTOP-2103] - [Docker] Move bigtop/slaves image build to gradle
-    * [BIGTOP-2110] - [Docker] Cache packages required by gradle to execute into bigtop/slaves images
-    * [BIGTOP-2119] - Bump Mahout version to 0.11.1
-    * [BIGTOP-2141] - Have a way to specify alternative BOM file in the build time
-    * [BIGTOP-2142] - Source cleanup: bigtop-repos shouldn&#39;t be top-level.
-    * [BIGTOP-2143] - [Puppet] Automatically generate default repo
-    * [BIGTOP-2153] - Simplify and complete storage dir creation
-    * [BIGTOP-2163] - Add a phoenix-pherf sub-package for Phoenix
-    * [BIGTOP-2164] - Phoenix Queryserver should write it&#39;s logs into /var/log/phoenix/
-    * [BIGTOP-2192] - Start generating ubuntu-14.04 deployment image
-    * [BIGTOP-2194] - Add Ubuntu configuration to the docker provisioner
-    * [BIGTOP-2239] - Smoke tests should have a single location for logger configuration
-    * [BIGTOP-2259] - Check for smoke-test projects should be generalized 
-
-** New Feature
-    * [BIGTOP-1149] - Package Kite
-    * [BIGTOP-1746] - Introduce the concept of roles in bigtop cluster deployment
-    * [BIGTOP-1769] - Zeppelin Integration
-    * [BIGTOP-1976] - Replace Pair type with Apache Commons Lang Pair type
-
-** Task
-    * [BIGTOP-1701] - Upgrade to Hive 1.1.0
-    * [BIGTOP-1795] - Upgrade bigtop_toolchain to Maven 3.2.5
-    * [BIGTOP-1821] - Add smoke tests for Ignite Hadoop Bigtop module
-    * [BIGTOP-1897] - Umbrella JIRA for CI overhaul
-    * [BIGTOP-1925] - Add ywkim as a maintainer for YCSB
-    * [BIGTOP-1953] - Upgrade bigtop_toolchain to Maven 3.3.3
-    * [BIGTOP-2069] - Update README.md to reflect CTR trial
-    * [BIGTOP-2078] - Define Bigtop 1.1 release BOM
-    * [BIGTOP-2146] - Upgrade bigtop toolchain ant version to 1.9.6
-    * [BIGTOP-2214] - Bump up ignite-hadoop version to the latest stable 1.5.0.final
-    * [BIGTOP-2297] - Update provisioner pointers to (future) 1.1 release repos
-
-** Test
-    * [BIGTOP-2158] - Update README.md to reflect the acceptance of CTR model
-    * [BIGTOP-2232] - Add smoke tests for HDFS
-    * [BIGTOP-2249] - Add YARN smoke tests
-
-
-* Release 1.0.0 (2015-05-29)
-
-Release Notes - Bigtop - Version 1.0.0
-
-** Sub-task
-    * [BIGTOP-1105] - Fix lintian errors in the hadoop package
-    * [BIGTOP-1334] - Add DFS tests to TestCLI
-    * [BIGTOP-1392] - Hive basic smoke test fails
-    * [BIGTOP-1461] - Add a simple Hive validation to smoke-tests.
-    * [BIGTOP-1465] - Include Sqoop1 in bigtop 0.9
-    * [BIGTOP-1468] - Include zookeeper 3.4.6
-    * [BIGTOP-1486] - Upgrade Hue to 3.7
-    * [BIGTOP-1535] - Add Spark ETL script to BigPetStore
-    * [BIGTOP-1537] - [BigPetStore] Add  Spark Product Recommender example
-    * [BIGTOP-1543] - hive-0.14 in bigtop
-    * [BIGTOP-1550] - Spark update to 1.1.0
-    * [BIGTOP-1563] - Puppet deployment needs to setup user hdfs keys for password-less logins
-    * [BIGTOP-1571] - Remove Whirr from the BOM
-    * [BIGTOP-1594] - Upgrade Pig to 0.14.0
-    * [BIGTOP-1609] - Use openjdk7 for CentOS
-    * [BIGTOP-1615] - Tests dependencies are on old Hadoop versions (2.3.0, 2.4.1)
-    * [BIGTOP-1632] - Support JMX monitoring when multiple HBase RS is brought up in a node
-    * [BIGTOP-1649] - Upgrade Apache Flume to 1.5.2
-    * [BIGTOP-1707] - Upgrade Hadoop to 2.6.0
-    * [BIGTOP-1727] - Fix a build failure for Spark 1.2.1
-    * [BIGTOP-1728] - jackson dependency management for Spark 1.2.1 and  Hadoop 2.6.0
-    * [BIGTOP-1738] - Update HBase because javadoc broken
-    * [BIGTOP-1765] - Bump the stack version to 1.0
-    * [BIGTOP-1768] - Use openjdk in ubuntu14, Remove references to jdk6 
-    * [BIGTOP-1842] - Remove gridgain-hadoop from BOM
-    * [BIGTOP-1899] - Migrate CI master to EC2 instance donated by Amazon EMR team
-    * [BIGTOP-1900] - Upgrade bigtop/slaves images on bigtop's dockerhub
-    * [BIGTOP-1901] - Provide bigtop/deploy images on dockerhub for CI and users to consume docker provisioner
-    * [BIGTOP-1947] - Fix RAT plugin configuration to be able to RAT-validate all published artifacts
-
-** Bug
-    * [BIGTOP-894] - Pig compilation fails on RPM systems on Bigtop trunk
-    * [BIGTOP-965] - IntegrationTestHiveSmokeBulk not being run
-    * [BIGTOP-972] - fix packagetest package remove logic
-    * [BIGTOP-977] - hive smoke tests are full of internal infrastructure details irrelevant to Bigtop
-    * [BIGTOP-1050] - Permissions on YARN LCE should be 4754
-    * [BIGTOP-1104] - Fix lintian errors
-    * [BIGTOP-1121] - implement tests for HDFS snapshots
-    * [BIGTOP-1135] - Hue has google analytics enabled by default
-    * [BIGTOP-1194] - redhat-lsb-core is sufficient in spec files
-    * [BIGTOP-1277] - Remove Windows files from unix packaging
-    * [BIGTOP-1287] - Mahout smokes : Remove dirchlet/meanshift clustering
-    * [BIGTOP-1327] - bigpetstore.arch is out of date
-    * [BIGTOP-1356] - Generate hive.install on the fly
-    * [BIGTOP-1357] - Get rid of HCatalog packaging code
-    * [BIGTOP-1381] - Datafu and Spark .deb build is broken 
-    * [BIGTOP-1384] - Implement Gradle Wrapper for smoke tests and cleanup.
-    * [BIGTOP-1395] - Simplifying non-component versions evaluation
-    * [BIGTOP-1445] - Pig fails with clean .ivy and .m2
-    * [BIGTOP-1446] - Spark fails with clean .m2
-    * [BIGTOP-1451] - smoke-tests (gradle based) Mahout need to run a real smoke test.
-    * [BIGTOP-1457] - Hue do-component-build does not set version properly
-    * [BIGTOP-1458] - Maven build severly broken 
-    * [BIGTOP-1459] - Remove hadoop-client jar from packages that bundle it
-    * [BIGTOP-1460] - maven dependencies broken
-    * [BIGTOP-1462] - Failed to build spark by gradle
-    * [BIGTOP-1467] - version of hadoop-auth in oozie is wrong 
-    * [BIGTOP-1472] - Hadoop RPM build is broken after BIGTOP-1462
-    * [BIGTOP-1487] - Documentation for use case and deployment models for failure tests.
-    * [BIGTOP-1488] - bigtop_toolchain hardcode the apache mirror sites for downloading maven
-    * [BIGTOP-1490] - Adding GridGain to BigTop
-    * [BIGTOP-1491] - Update docker-puppet README 
-    * [BIGTOP-1497] - Add tachyon to bigtop 
-    * [BIGTOP-1502] - Improve puppet deployment for new gridgain component
-    * [BIGTOP-1504] - Bigtop docker provision does not work on mac
-    * [BIGTOP-1506] - bigtop-utils won't detect JAVA_HOME in some cases
-    * [BIGTOP-1508] - fix Puppet warnings under Puppet 3 
-    * [BIGTOP-1509] - Update deployment README after BIGTOP-1047
-    * [BIGTOP-1511] - TestCLI: make TestCLI compatible with Hadoop 2.4 (HADOOP-8691)
-    * [BIGTOP-1513] - FailureExecutor.groovy is in the wrong module, causing compiler errors.
-    * [BIGTOP-1521] - Bigtop smoke-tests hierarchy and fast failure
-    * [BIGTOP-1522] - nodemanager deployment needs to include hadoop-mapreduce package
-    * [BIGTOP-1523] - Gradle install for test artifacts uses wrong sequence of the targets
-    * [BIGTOP-1524] - FailureExecutor  breaks smoke tests : Smoke tests should run from source
-    * [BIGTOP-1526] - property file for FailureVars can not be placed to {{/}}
-    * [BIGTOP-1534] - Update README.md to reflect new build system and other changes
-    * [BIGTOP-1541] - Support Debian jessie in Bigtop_toolchain; cleanup of pp files
-    * [BIGTOP-1542] - Debian Packages will not build
-    * [BIGTOP-1544] - [BigPetStore] Use of java.io.File and check for directory existence will fail with non-POSIX DFSs (HDFS, S3, etc.)
-    * [BIGTOP-1548] - hue uses snapshot dependency to removed snapshot
-    * [BIGTOP-1549] - spark compile broken
-    * [BIGTOP-1551] - build stops because gradle does not handle symlinks well
-    * [BIGTOP-1553] - puppet installation fails when components value is a single item instead of a list
-    * [BIGTOP-1554] - bigtop-deploy/puppet/config/site.csv was wrongfully committed.
-    * [BIGTOP-1562] - Critical tachyon pupet improvements.
-    * [BIGTOP-1564] - docker-puppet doesn't use vagrantconfig.yaml for configuration parameters
-    * [BIGTOP-1566] - Puppet README file needs to reflect multiple modules directory requirement
-    * [BIGTOP-1567] - puppet cannot install components whatever given to it
-    * [BIGTOP-1568] - bigtop01 needs gradle
-    * [BIGTOP-1570] - docker-puppet fails to deploy due to stdlib can not be found
-    * [BIGTOP-1574] - Phoenix packaging requires build and classpath changes after PHOENIX-1455
-    * [BIGTOP-1579] - Implement patching for Bigtop
-    * [BIGTOP-1580] - Improve Bigtop Toolchain: Versioning of Packages
-    * [BIGTOP-1585] - test artifacts tasks include extra tasks for nonexisting components
-    * [BIGTOP-1586] - BigPetStore-Spark only works on the East Coast .
-    * [BIGTOP-1587] - Fix typos in install-tomcat.sh
-    * [BIGTOP-1588] - Current Bigtop Pig does not build because of API Change in Hive
-    * [BIGTOP-1589] - Prioritization is broken in bigtop-detect-javahome
-    * [BIGTOP-1592] - Integration test IntegrationTestClusterFailures is failing
-    * [BIGTOP-1596] - bigtop build fails because it tries to write to forrest installation
-    * [BIGTOP-1604] - Create a MAINTAINERS.txt File
-    * [BIGTOP-1605] - Tachyon RPM does not depend on bigtop utils
-    * [BIGTOP-1606] - Tachyon webapp directory not located where expected
-    * [BIGTOP-1610] - Fix /etc/profile.d/bigtop.sh scripts 
-    * [BIGTOP-1611] - mahoutsmokes aren't compiling
-    * [BIGTOP-1618] - iTest integration tests need only to be bound to verify goal
-    * [BIGTOP-1620] - Hadoop deb Packages for architecture "all" containing binaries
-    * [BIGTOP-1626] - Add bmahe as a maintainer for Apache Flume
-    * [BIGTOP-1633] - Pig compile fails again 
-    * [BIGTOP-1635] - hue compile on vanilla debian
-    * [BIGTOP-1636] - Missing patches and files for hue
-    * [BIGTOP-1639] - Add Olaf Flebbe to dev list
-    * [BIGTOP-1642] - Restructure and enhance toolchain
-    * [BIGTOP-1652] - Fix BigPetStore-Spark Data format regression
-    * [BIGTOP-1654] - change some hadoop puppet variables from local to node scope
-    * [BIGTOP-1656] - add tez variables to hadoop-env.sh
-    * [BIGTOP-1657] - bigtop-deploy puppets to support tez
-    * [BIGTOP-1660] - Upgrade to Hive-1.0
-    * [BIGTOP-1662] - puppet: Fix hadoop configuration file incompleteness due to hiera conversion
-    * [BIGTOP-1663] - TestHadoopSmoke is failing because of improper set-up
-    * [BIGTOP-1665] - Update Wiki pages on how to develop and build Bigtop
-    * [BIGTOP-1668] - puppet: Adjust kerberos module for current Debian
-    * [BIGTOP-1669] - puppet: Hadoop: Separate dependencies on Kerberos keytabs
-    * [BIGTOP-1677] - Tez packaging is still failing
-    * [BIGTOP-1678] - Use openjdk7 for AmazonLinux
-    * [BIGTOP-1687] - Puppet: fix wrong array representation for hadoop-zookeeper::server::ensemble
-    * [BIGTOP-1694] - puppet: Make httpfs subscribe to core-site and hdfs-site
-    * [BIGTOP-1699] - sqoop does not build on debian
-    * [BIGTOP-1703] - Limt override of JAVA_OPTS in install_solr.sh to cause minimal side-effects
-    * [BIGTOP-1706] - Make TestBlockRecovery runnable under all users
-    * [BIGTOP-1710] - Spark 1.1.0 does not build for DEB
-    * [BIGTOP-1711] - add packages to compile hue
-    * [BIGTOP-1712] - Specification for Docker images.
-    * [BIGTOP-1722] - upgrade tachyon to 0.6.0
-    * [BIGTOP-1723] - update Tachyon Maintainers
-    * [BIGTOP-1724] - Update centos vagrant box 
-    * [BIGTOP-1729] - Install Hive has typo in HCATALOG option
-    * [BIGTOP-1731] - Gradle mustRunAfter doesn't work as expected
-    * [BIGTOP-1732] - remove has_ssh in Docker provisioner
-    * [BIGTOP-1733] - fix local_yum implementation
-    * [BIGTOP-1745] - Add Mark Grover as committer on the website
-    * [BIGTOP-1748] - Remove assert on fs.trash.interval from TestCLI and TestDFSCLI
-    * [BIGTOP-1751] - Puppet deployment ignores bigtop_repo_uri variable
-    * [BIGTOP-1756] - Add HADOOP_MAPRED_HOME property to common
-    * [BIGTOP-1757] - bigtop_toolchain has to differentiate between centos6 and centos7
-    * [BIGTOP-1758] - Remove redundant property checks from TestHadoopExamples
-    * [BIGTOP-1761] - Delete testConf.xml from hadoop-smoke resources 
-    * [BIGTOP-1763] - Handle broken symlinks when copying jars
-    * [BIGTOP-1764] - Fix copying mapreduce jars to HDFS for Oozie
-    * [BIGTOP-1774] - Update mailing list page
-    * [BIGTOP-1775] - Fix typos and mistakes on mailing list page
-    * [BIGTOP-1776] - Resolve an warning on packaging Hadoop RPM
-    * [BIGTOP-1778] - spark-1.3.0 broke hive
-    * [BIGTOP-1779] - giraph compile broken
-    * [BIGTOP-1780] - bigtop_toolchain: JAVA_HOME incorrect, remove JAVA6_HOME
-    * [BIGTOP-1781] - tachyon fails to build
-    * [BIGTOP-1790] - Top level menu says 'Power By' - needs to be fixed
-    * [BIGTOP-1793] - bigtop_toolchain: remove JAVA_HOME/bin from PATH
-    * [BIGTOP-1798] - init-hdfs.sh uses groovy but no dependency in package description given
-    * [BIGTOP-1804] - Not again: srpm target fails on SUSE
-    * [BIGTOP-1808] - hive 1.0.0 : kerberos does not work correctly
-    * [BIGTOP-1810] - Spark thriftserver service does not indicate success
-    * [BIGTOP-1812] - fix bps pig integration test
-    * [BIGTOP-1813] - Create /user/hbase HDFS directory
-    * [BIGTOP-1814] - Puppet deployment code needs to work with ignite-hadoop component
-    * [BIGTOP-1815] - Tez build is broken in the container
-    * [BIGTOP-1816] - Puppet deploy is broken on Ubuntu
-    * [BIGTOP-1817] - bigtop-utils deb is missing  init-hcfs.groovy file
-    * [BIGTOP-1818] - ignite-hadoop-service debian is missed; causing Puppet deploy to fail
-    * [BIGTOP-1824] - Tachyon master and worker daemon script does not work properly
-    * [BIGTOP-1827] - Tachyon package name conflict on Debian/Ubuntu
-    * [BIGTOP-1828] - Puppet: Tachyon does not get formatted before tachyon daemons are started up
-    * [BIGTOP-1830] - Move apache-forrest installation from  bigtop_toolchain to pig compile
-    * [BIGTOP-1832] - hdfs-site.xml: update shortcut reader and remove obsolete shortcut reader users
-    * [BIGTOP-1833] - Bump Ignite to 1.1.0
-    * [BIGTOP-1835] - Update project RDF file
-    * [BIGTOP-1839] - Building RPM for Zookeeper 3.4.6 is broken
-    * [BIGTOP-1840] - datafu does not pick up pig dependency from local repository
-    * [BIGTOP-1841] - toolchain can not set java correctly in fedora
-    * [BIGTOP-1845] - toolchain can not drop bigtop.sh in fedora because of puppet syntax error
-    * [BIGTOP-1848] - spark deb packages broken
-    * [BIGTOP-1849] - toolchain can not set java home correctly in CentOS 7
-    * [BIGTOP-1850] - Update Hue to 3.8
-    * [BIGTOP-1853] - Phoenix build does not properly override Hadoop version property
-    * [BIGTOP-1855] - Remove Obsolete Jobtracker settings from mapred-site.xml
-    * [BIGTOP-1857] - Support yarn-built-in proxy
-    * [BIGTOP-1859] - Unable to build Hue packages on Amazon Linux
-    * [BIGTOP-1860] - ignite-hadoop release URL should point to apache dist server, not a mirror
-    * [BIGTOP-1861] - giraph does not build
-    * [BIGTOP-1865] - circular dependency between bigtop-groovy and bigtop-utils
-    * [BIGTOP-1866] - openssl-devel is required by hue RPM package
-    * [BIGTOP-1867] - Upgrade Gradle version to 2.4
-    * [BIGTOP-1868] - Address the custom 'clean' warning 
-    * [BIGTOP-1869] - Hue 3.8.1 refinements
-    * [BIGTOP-1870] - Latest version of Puppet::Apt doesn't work for our deployment recipes
-    * [BIGTOP-1872] - Improve HUE puppet recipes
-    * [BIGTOP-1874] - HBase build failed due to Codehaus repository is out of service
-    * [BIGTOP-1875] - Oozie build failed due to Codehaus repository is out of service
-    * [BIGTOP-1876] - Update puppet recipes for Sqoop2
-    * [BIGTOP-1879] - vagrant-puppet-vm is broken
-    * [BIGTOP-1886] - Kafka server can not create a log-cleaner.log file
-    * [BIGTOP-1893] - Compilation of hadoop-yarn-client failed
-    * [BIGTOP-1894] - Snappy development packages are missing from bigtop_toolchain
-    * [BIGTOP-1896] - bigtop_toolchain broken by ant update
-    * [BIGTOP-1902] - typo in bigtop-deploy/vm/vagrant-puppet-vm/vagrantconfig.yaml
-    * [BIGTOP-1916] - Update Website for 1.0
-    * [BIGTOP-1937] - redhat-lsb is required by kafka daemon
-    * [BIGTOP-1938] - kafka packages /usr/bin on RPM
-    * [BIGTOP-1946] - Missing ASL header in some of iTest files
-    * [BIGTOP-1949] - Sqoop 1.4.5 artifacts aren't getting resolved in the release...
-    * [BIGTOP-1950] - Upgrade maven-assembly plugin: StackOverFlowException is thrown
-    * [BIGTOP-1951] - Fix licenses in the source files
-    * [BIGTOP-1958] - Upgrade default repositories and docker images to 1.0
-
-** Improvement
-    * [BIGTOP-1047] - Support Puppet 3.x
-    * [BIGTOP-1204] - Add support for AmazonLinux
-    * [BIGTOP-1235] - Speed-up init-hdfs.sh for complete HCFS compliant provisioning
-    * [BIGTOP-1275] - BigPetStore: Add all 50 states
-    * [BIGTOP-1301] - Groovy 2.3.0 is out! Let's bundle it into the stack
-    * [BIGTOP-1325] - Update TestHDFSCLI to include tests from upstream
-    * [BIGTOP-1366] - Updated, Richer Model for Generating Data for BigPetStore 
-    * [BIGTOP-1388] - Use cluster failure tests during other tests with command line parametrization
-    * [BIGTOP-1414] - Add Apache Spark implementation to BigPetStore
-    * [BIGTOP-1423] - Add Groovy installation to the bigtop_toolchain
-    * [BIGTOP-1449] - Add RC Testing support to vagrant recipe
-    * [BIGTOP-1450] - Eliminate broken hive test artifacts in favor of smoke-tests.
-    * [BIGTOP-1478] - Start tomcat watchdog as part of solr init scripts
-    * [BIGTOP-1481] - Log files are cluttered with messages from tar
-    * [BIGTOP-1484] - Include python in toolchain
-    * [BIGTOP-1489] - move the tomcat_watchdog code out of install_solr.sh into a separate file in bigtop-utils in order to facilitate reusability
-    * [BIGTOP-1495] - Remove make build system
-    * [BIGTOP-1498] - Add RC Testing to Docker based Bigtop provisioner
-    * [BIGTOP-1501] - come up with a way to use Jenkins plugins/Groovy console to create jobs
-    * [BIGTOP-1510] - Minor README update about bigtop-deploy
-    * [BIGTOP-1517] - refactor vagrant provisioners to be configurable by yaml file
-    * [BIGTOP-1527] - Allow to fetch package's source code from Git
-    * [BIGTOP-1545] - [BigPetStore] Bump version of BPS data generator library to 0.2.1
-    * [BIGTOP-1547] - Confirm tachyon functioning in vagrant recipes.
-    * [BIGTOP-1555] - remove mapred-app from docs or code snippet (post-BIGTOP-1522), cleanup vagrant-puppet to core components.
-    * [BIGTOP-1557] - website misses ci and announce mailing lists info
-    * [BIGTOP-1558] - Make gradle easier to navigate
-    * [BIGTOP-1559] - Tests are failing in Apache CI build
-    * [BIGTOP-1569] - provide alias targets for building packages native to the OS
-    * [BIGTOP-1576] - Add Vagrant installation to bigtop_toolchain
-    * [BIGTOP-1578] - Refactor vagrant-puppet provisioner to have its configurations all come from yaml
-    * [BIGTOP-1581] - Allow multiple Flume agents to be executed as a service using Bigtop init.d script
-    * [BIGTOP-1583] - update the default yum repo in puppet site.pp
-    * [BIGTOP-1590] - Add a functionality to run integration tests for itest
-    * [BIGTOP-1591] - JarContentTest is failing on JDK7
-    * [BIGTOP-1593] - Fix types in FailureVars 
-    * [BIGTOP-1600] - Improve front page content of website
-    * [BIGTOP-1601] - cleanup whitespaces across test-artifacts 
-    * [BIGTOP-1603] - Rename docker-puppet and vagrant-puppet to better indicate its function and difference
-    * [BIGTOP-1612] - Update README for smoke-tests
-    * [BIGTOP-1613] - Add .gitignore for vagrant-puppet
-    * [BIGTOP-1614] - Add .gitignore file for docker-puppet
-    * [BIGTOP-1621] - Migrate itest to gradle
-    * [BIGTOP-1627] - Move enable_local_yum implementation to bigtop-deploy/utils/setup-env.sh to share with Docker provisioner
-    * [BIGTOP-1634] - Puppet class parameter and hiera conversion
-    * [BIGTOP-1640] - Add missing files for Apache Spark
-    * [BIGTOP-1645] - Add subpackages for spark history server and thrift server 
-    * [BIGTOP-1651] - puppet: Format namenode and zkfc in noninteractive mode
-    * [BIGTOP-1653] - Add queries for customer, state, and product statistics w/ d3 friendly JSON output to analytics phase. 
-    * [BIGTOP-1659] - gridgain-hadoop packages need to provide unversioned softlinks for accelerator jar files
-    * [BIGTOP-1667] - Clean up and simplify the install phase for Apache Spark
-    * [BIGTOP-1670] - puppet: Support Kerberos authentication on Hadoop component web GUIs
-    * [BIGTOP-1674] - Account for BPS simulation "burn-in" time 
-    * [BIGTOP-1675] - Puppet: remove wheel group setting for bigtop_real_users
-    * [BIGTOP-1676] - Puppet: add deb repo auto deploy
-    * [BIGTOP-1681] - [BigPetStore] Clean up BPS SparkSQL analytics module, add new queries
-    * [BIGTOP-1683] - puppet hadoop-env.sh: Add option to configure tez environment
-    * [BIGTOP-1684] - puppet hadoop/yarn: Make container executor configurable
-    * [BIGTOP-1685] - puppet hadoop: Remove useless use of explicit hiera lookup
-    * [BIGTOP-1686] - puppet hadoop/mapred: Update and clean mapred template and namespace
-    * [BIGTOP-1691] - Hiera site.yaml template needs to have repo uri
-    * [BIGTOP-1692] - hieradata/site.yaml is missing gridgain-hadoop component
-    * [BIGTOP-1693] - Puppet stdlib should be automatically installed by toolchain
-    * [BIGTOP-1696] - Provide build command to show BOM list
-    * [BIGTOP-1697] - Bootstrap: hook up toolchain into gradle build
-    * [BIGTOP-1698] - Following BIGTOP-1697 improve README entry on toolchain setup
-    * [BIGTOP-1700] - Package YARN Timeline Server service for Apache Hadoop 
-    * [BIGTOP-1705] - puppet: Allow configuring which hue apps to install
-    * [BIGTOP-1709] - Enable "hadoop26" profile for Apache Tez
-    * [BIGTOP-1718] - add facility for printing out BOM in json format
-    * [BIGTOP-1721] - Remove unnecessary files from Sqoop package
-    * [BIGTOP-1725] - Exclude null values from BOM json output added in BIGTOP-1718
-    * [BIGTOP-1741] - Upgrade to Apache Phoenix 4.3.1
-    * [BIGTOP-1742] - Add ywkim as a maintainer for Apache Hive, Spark, Sqoop* and Phoenix
-    * [BIGTOP-1743] - Add RJ Nowling as committer to website
-    * [BIGTOP-1770] - Let the user specify which JDK package to be installed in vagrantconfig.yaml
-    * [BIGTOP-1771] - Let the user specify what components to be tested when running smoke-tests by Docker/VM provisioner
-    * [BIGTOP-1783] - Import BigPetStore Data Generator into BigTop
-    * [BIGTOP-1787] - puppet: Update hue.ini to match Hue 3.7.1 template
-    * [BIGTOP-1791] - Set group of /user/hue to "hue" in HDFS
-    * [BIGTOP-1794] - Update README for bigtop_toolchain
-    * [BIGTOP-1801] - Generalize subst pattern matching for BOM file
-    * [BIGTOP-1803] - Override Vagrant configurations from environment variables
-    * [BIGTOP-1809] - Remove gridgain-hadoop component once ignite-hadoop gets added
-    * [BIGTOP-1829] - Tachyon configuration files should reside in /etc/tachyon/conf instead of /etc/tachyon
-    * [BIGTOP-1836] - Better UX for Docker provisioner by integrating it into gradle 
-    * [BIGTOP-1851] - Upgrade to HBase 0.98.12
-    * [BIGTOP-1862] - Add release download link to the front page of the website
-    * [BIGTOP-1919] - Remove fixConditional methods in BPS data generator
-    * [BIGTOP-1923] - Bump Ignite to 1.2 with major fixes
-    * [BIGTOP-1932] - [BigPetStore] Add larger product collection
-    * [BIGTOP-1939] - Enable basic hdfs-nfs gateway configuration
-
-** New Feature
-    * [BIGTOP-989] - Add Apache Kafka  to Apache Bigtop
-    * [BIGTOP-1417] - Dockerize the puppetized vagrant deployer
-    * [BIGTOP-1448] - Include zookeeper C library in bigtop
-    * [BIGTOP-1480] - Define BOM for 1.0 release of Bigtop
-    * [BIGTOP-1552] - Add support for AmazonLinux to bigtop-deploy/puppet
-    * [BIGTOP-1716] - Define SCALA_VERSION in bigtop.bom
-    * [BIGTOP-1744] - Add BigPigPetstore File and REST Load Generation.
-    * [BIGTOP-1806] - Integrate Apache Ignite (incubating) with BigTop
-    * [BIGTOP-1918] - Add product enumeration to BPS data generator
-    * [BIGTOP-1931] - Add multinomial product purchasing model to BPS Data Generator
-
-** Task
-    * [BIGTOP-1393] - Retire the make build system
-    * [BIGTOP-1525] - FailureExecutor : Is it going to be maintained/used?
-    * [BIGTOP-1556] - Cleanup some unused minor stuff.
-    * [BIGTOP-1595] - kill list
-    * [BIGTOP-1597] - Design flyers for upcoming SCALE conference
-    * [BIGTOP-1638] - Add Dasha Boudnik to dev list
-    * [BIGTOP-1648] - Update to Spark 1x to Spark 1.2.1
-    * [BIGTOP-1655] - Update VM and Docker provisioner to work with hiera converted puppet recipes
-    * [BIGTOP-1726] - Confirm that Spark 1.2.1 runs properly in BigTop, and run the BPS integration test.
-    * [BIGTOP-1755] - Upgrade to Spark 1.3.0
-    * [BIGTOP-1777] - Add cool ASCII-art logo of Bigtop to the build.gradle
-    * [BIGTOP-1822] - Puppet apt module should be automatically installed by toolchain
-    * [BIGTOP-1831] - Upgrade Mahout to 0.10
-    * [BIGTOP-1834] - Upgrade to Spark 1.3.1
-    * [BIGTOP-1854] - Trim Tachyon maintainers' list
-    * [BIGTOP-1873] - Puppet apt module version must be 2.0.1 or above after BIGTOP-1870
-    * [BIGTOP-1897] - Umbrella JIRA for CI overhaul
-
-** Test
-    * [BIGTOP-1377] - Add a test case for performing block corruption recovery
-    * [BIGTOP-1560] - Add a test case for performing block corruption recovery
-    * [BIGTOP-1617] - Make clean-up commands in TestCLI more specific
-    * [BIGTOP-1629] - Move testDFSCLI from hcfs to hdfs directory
-    * [BIGTOP-1664] - make test unpack resources optional
-
-** Wish
-    * [BIGTOP-1619] - change hadoop namenode port to 8020 (apache default port) in puppet script
-
-
-* Release 0.8.0 (2014-09-06)
-
-Release Notes - Bigtop - Version 0.8.0
-
-- SUB-TASK
-    - [BIGTOP-1111] - Bump version of Hadoop to 2.2.0
-    - [BIGTOP-1165] - Ivy config for hive metastore looks incorrect, which breaks build on JDK 7
-    - [BIGTOP-1166] - Hive-jdbc doesn't compile on JDK 7, which breaks build
-    - [BIGTOP-1183] - Rename spark package to spark-core
-    - [BIGTOP-1184] - bump hadoop version to 2.3.0
-    - [BIGTOP-1202] - One-button-click way for installing all needed bits of testing framework
-    - [BIGTOP-1203] - Update toolchain to automatically install Gradle into dev env.
-    - [BIGTOP-1208] - Add DFSIO test into longevity suite
-    - [BIGTOP-1209] - Add SLive test into longevity suite
-    - [BIGTOP-1242] - bump jsvc to 1.0.15
-    - [BIGTOP-1244] - bump flume to 1.5.0.1
-    - [BIGTOP-1278] - bump pig version to 0.12.1
-    - [BIGTOP-1279] - bump hive version to 0.12.0
-    - [BIGTOP-1280] - bump Oozie version to 4.0.1
-    - [BIGTOP-1281] - bump hbase version to 0.98.2
-    - [BIGTOP-1282] - bump Mahout version to 0.9
-    - [BIGTOP-1283] - bump Solr version to 4.6.0
-    - [BIGTOP-1284] - bump Spark version to 0.9.1
-    - [BIGTOP-1285] - Bump version of Phoenix to 4.0
-    - [BIGTOP-1291] - On a clean ~/.m2 hbase mvn install site will fail as install is executed first
-    - [BIGTOP-1293] - Include Hive 0.13 into the stack
-    - [BIGTOP-1302] - Update hbase smoke tests to escape Hbase's "event horizon"
-    - [BIGTOP-1330] - bump Giraph version to 1.1.0
-    - [BIGTOP-1331] - bump Hue version to 3.6.0
-    - [BIGTOP-1340] - Hbase build has to use hadoop-two.version sys. prop
-    - [BIGTOP-1350] - Bump version of Crunch to 0.10.0
-    - [BIGTOP-1354] - bump version of Solr to 4.9.0
-    - [BIGTOP-1426] - Bump version of Hadoop to 2.4.1
-    - [BIGTOP-1432] - Update HBase version to 0.98.5 in the BOM
-    - [BIGTOP-1433] - Fix Hive build after BIGTOP-1429 
-    - [BIGTOP-1434] - Maven build failing with NoClassDefFoundError at maven-site-plugin:3.0-beta-3:site (packaging-documentation) @ sqoop-docs 
-    - [BIGTOP-1435] - Fix Crunch build after BIGTOP-1429 
-
-- BUG
-    - [BIGTOP-282] - the licensing status of the MovieLens data files needs to be cleared up
-    - [BIGTOP-831] - Hadoop branch-2 (and trunk?) are failing when built with JDK7u11
-    - [BIGTOP-922] - Existing Puppet recipes are failing to initialize HDFS directories on the first run
-    - [BIGTOP-986] - install-flume.sh uses -x instead of -e
-    - [BIGTOP-996] - Document steps needed to run BT tests on a fresh host
-    - [BIGTOP-997] - TestCLI fails test id 5:  comparator fails
-    - [BIGTOP-1042] - Add support for Hadoop 2.1.x-beta
-    - [BIGTOP-1048] - ZooKeeper installation use "zookeeper-server.pid" as default while ZooKeeper expects zookeeper_server.pid
-    - [BIGTOP-1072] - Vagrant scripts for spinning up and "hydrating" bigtop vms 
-    - [BIGTOP-1092] - Add Hue documentation package
-    - [BIGTOP-1097] - introduce bigtop-groovy package
-    - [BIGTOP-1124] - solrctl should be able to auto-detect JAVA_HOME
-    - [BIGTOP-1125] - Return value does not reflect status checks
-    - [BIGTOP-1128] - FIX and modularize mahout sample data sets
-    - [BIGTOP-1132] - Phoenix RPM specfile does not create symlinks
-    - [BIGTOP-1141] - Add TestYarnNode to test yarn node 
-    - [BIGTOP-1143] - Oozie configuration file is incomplete
-    - [BIGTOP-1146] - Flume NG package is broken because of org.hbase.asynchbase deps. mismanagement
-    - [BIGTOP-1150] - update Spark debian packaging to the latest format
-    - [BIGTOP-1153] - Crunch package doesn't depend on anything
-    - [BIGTOP-1164] - Get whole Bigtop stack building on JDK 7
-    - [BIGTOP-1167] - got "Permission denied" when creating vagrant home folder in provision.sh
-    - [BIGTOP-1168] - hue build fails (unresolvable maven dependencies)
-    - [BIGTOP-1169] - Hue rpm build fails
-    - [BIGTOP-1174] - missing dependency setting on init-hdfs.sh in puppet recipes
-    - [BIGTOP-1181] - Add pyspark to spark package
-    - [BIGTOP-1187] - Spark service packages have extraneous core in the name
-    - [BIGTOP-1191] - master pom.xml file formatting is out of whack
-    - [BIGTOP-1197] - iTest PackageManagerTests are failing
-    - [BIGTOP-1199] - Makefile for deb packing needs to support zip archives
-    - [BIGTOP-1205] - GRADLE_OPTS missing closing " in toolchain jenkins.sh.*
-    - [BIGTOP-1217] - Spark test-artifact missing repository for install
-    - [BIGTOP-1218] - Add JDK7 to Bigtop toolchain
-    - [BIGTOP-1220] - Gradle task order isn't enforced properly with dependsOn
-    - [BIGTOP-1221] - Expand and updated FUSE tests
-    - [BIGTOP-1223] - TestSlive does not fully remove root directory after an iteration
-    - [BIGTOP-1225] - TestHDFSQuota can fail if tests are run out of order
-    - [BIGTOP-1226] - TestSLive setup() already being used, need to rename
-    - [BIGTOP-1232] - test artifact build fails in io-longevity
-    - [BIGTOP-1234] - TestFileAppend shouldn't reuse client during testMultipleOutputStreamFailure()
-    - [BIGTOP-1236] - TestCLI expecting permissions incorrectly 
-    - [BIGTOP-1238] - TestCLI has misspelled 'NAMNEODE' that is used without protocol name
-    - [BIGTOP-1239] - TestCLI: make ports optional in regex comparators
-    - [BIGTOP-1246] - Fix the shell-object bug in HttpFs smoke tests
-    - [BIGTOP-1250] - TestTextSnappy fails
-    - [BIGTOP-1252] - Update ant from 1.9.2 to 1.9.3
-    - [BIGTOP-1253] - TestSLive calling setpDir() instead of setupDir()
-    - [BIGTOP-1254] - add gradle specific files to .gitignore
-    - [BIGTOP-1255] - init-hcfs.json needs to encode privileges as strings, not numbers.
-    - [BIGTOP-1256] - Spark package deployment should reflect BIGTOP-1183
-    - [BIGTOP-1258] - Spark puppet deploy might fail as /etc/spark can not be created
-    - [BIGTOP-1259] - spark-worker service should be started after spark-master if ran on the same node
-    - [BIGTOP-1260] - Puppet deployment should reflect on the new name of spark package
-    - [BIGTOP-1268] - Jenkins Job Bigtop-trunk-Hadoop failed from 3/15/2014 due to protoc version
-    - [BIGTOP-1274] - yarn ResourceManager should be started before starting NodeManagers
-    - [BIGTOP-1290] - Zookeeper build is failing on Ant 1.9.3
-    - [BIGTOP-1294] - asciidoc is missed from centos and sles environments of bigtop_toolchain
-    - [BIGTOP-1295] - package.mk mysteriously missing mkdir for RPMS that leads to SUSE build to fail
-    - [BIGTOP-1300] - need to bump ant version to 1.9.4 in bigtop_toolchain
-    - [BIGTOP-1303] - Pig 0.12.1 build is broken at the site target. Stack build can't proceed
-    - [BIGTOP-1304] - bigtop_toolchain needs to install Scala 2.10 to support Spark build
-    - [BIGTOP-1306] - RPM licensing metadata is inconsistent and wrong
-    - [BIGTOP-1313] - Ant 1.9.4 update is missed from ant.pp recipe.
-    - [BIGTOP-1319] - hive-rpm target is broken
-    - [BIGTOP-1322] - hbase_install.sh shall be called with bash
-    - [BIGTOP-1324] - Fix typo in testHDFSConf.xml of TestCLI
-    - [BIGTOP-1336] - Puppet recipes failed to deploy kerberos enabled hadoop cluster
-    - [BIGTOP-1337] - Logging success messages in service startup and shutdown only after verification.
-    - [BIGTOP-1345] - remove bigtop-packages/src/root@mgrover-centos6-2.ent.cloudera.com
-    - [BIGTOP-1346] - Generalize evaluation regexp in packages.gradle
-    - [BIGTOP-1355] - Hive 0.13 deb build is broken
-    - [BIGTOP-1358] - adjust toolchain to use JDK7 by default
-    - [BIGTOP-1359] - fix a few issues with our puppet_toolchain code
-    - [BIGTOP-1371] - Spark-master service fails to start due to missing /usr/lib/spark/work
-    - [BIGTOP-1380] - Pig might run out of memory during site construction
-    - [BIGTOP-1382] - Hadoop site.csv components additions: yarn & mapred-app
-    - [BIGTOP-1390] - JDK Woes
-    - [BIGTOP-1394] - package test manifest got out of date
-    - [BIGTOP-1396] - update metadata for bigtop-groovy and phoenix
-    - [BIGTOP-1397] - hive-hbase package is missing after move to the latest Hive
-    - [BIGTOP-1398] - hadoop-hdfs-fuse can't be installed on Ubuntu 14.04
-    - [BIGTOP-1399] - add --help to alternative invocation to support newer Ubuntus
-    - [BIGTOP-1400] - Whirr is failing with PermGen error: needs to be bumped up
-    - [BIGTOP-1401] - Create man pages for YARN, HDFS and MAPRED. 
-    - [BIGTOP-1402] - Update bigtop gradle to 2.x
-    - [BIGTOP-1405] - Gradle 2.0 installation from toolchain is broken
-    - [BIGTOP-1415] - Remove redundant jars from spark package.
-    - [BIGTOP-1418] - [BigPetStore] build.gradle - wrong variable name (hadoopClusterRuntime instead of runtime)
-    - [BIGTOP-1420] - Update Phoenix version to 4.1.0 in the BOM
-    - [BIGTOP-1421] - Phoenix build is missing some variables
-    - [BIGTOP-1425] - gradle build can fail with NPE in an obscure way
-    - [BIGTOP-1427] - HBase build should build hbase-***-hadoop2 by default
-    - [BIGTOP-1428] - update tests to reflect new home for hbase user
-    - [BIGTOP-1429] - Apache build of Bigtop test artifacts fails to find correct Hbase jars
-    - [BIGTOP-1430] - incorrect version of spark-core prevents test artifacts from compiling
-    - [BIGTOP-1438] - asciidoc is no longer needed by Sqoop
-    - [BIGTOP-1439] - split jdk7 and jdk6 in Bigtop toolchain
-    - [BIGTOP-1440] - a few usability updates to bigtop_toolchain
-    - [BIGTOP-1441] - bigtop_toolchain fails on Ubuntu
-    - [BIGTOP-1442] - need to add proper clean up for the toolchain puppet code
-    - [BIGTOP-1444] - Phoenix RPM build is broken after BIGTOP-1421
-
-- IMPROVEMENT
-    - [BIGTOP-840] - Refactor Bigtop puppet code for better integration with newer Puppet and Whirr
-    - [BIGTOP-848] - Allow to build stack on top of an arbitrary Hadoop SHA
-    - [BIGTOP-952] - init-hdfs.sh is dog slow. Let's replace it with a direct HDFS API calls and better layout management
-    - [BIGTOP-992] - Create HowToContribute wiki page
-    - [BIGTOP-1019] - Remove mysql requirement constraint from sqoop tests
-    - [BIGTOP-1026] - Big Top Mapred/Itest assertions should report specific commands which fail
-    - [BIGTOP-1029] - Add support for HBase 0.96+
-    - [BIGTOP-1085] - spark packages needs to be split
-    - [BIGTOP-1110] - Define BOM for 0.8.0 release of Bigtop
-    - [BIGTOP-1130] - Update Build Requirements on our web pages
-    - [BIGTOP-1137] - provide a way to request a major version of JDK
-    - [BIGTOP-1138] - hadoop-conf-pseudo package uses the deprecated fs.default.name parameter
-    - [BIGTOP-1139] - bigtop-tomcat and bigtop-jsvc don't depend on bigtop-utils
-    - [BIGTOP-1144] - Need to update command 'puppet apply' in file bigtop-deploy/puppet/README.md
-    - [BIGTOP-1147] - README.md improvements
-    - [BIGTOP-1152] - speed up spark build
-    - [BIGTOP-1155] - New logo ! 
-    - [BIGTOP-1161] - Have a way to dynamically select components to be deployed by Puppet recipes.
-    - [BIGTOP-1163] - puppet apply shows db init error if the oozie server is already running
-    - [BIGTOP-1171] - Puppetize the Vagrant deployer
-    - [BIGTOP-1178] - Clusterize the puppetized vagrant deployer.
-    - [BIGTOP-1195] - Higher level interface to smokes.
-    - [BIGTOP-1200] - Implement Generic Text File to define HCFS filesystem semantics
-    - [BIGTOP-1201] - Enhance (gradleize) the build to ease development, deployment; abstract implementation
-    - [BIGTOP-1207] - Create a foundation layer to build longevity and performance tests for the stack
-    - [BIGTOP-1210] - integrate BIGTOP-952 with plain text HCFS FS definition from  BIGTOP-1200
-    - [BIGTOP-1213] - TestHadoopExamples smokes: ordering of tests; other improvements
-    - [BIGTOP-1222] - Simplify and gradleize a subset of the bigtop smokes
-    - [BIGTOP-1224] - Provide a simple order for tests
-    - [BIGTOP-1241] - BIGTOP should update protobuf to 2.5 from 2.4.x 
-    - [BIGTOP-1257] - Make TestHadoopExamples modular enough to support longevity tests.
-    - [BIGTOP-1269] - BigPetStore: Create build w/ gradle
-    - [BIGTOP-1289] - Update component versions in the pom.xml (including hadoop)
-    - [BIGTOP-1296] - Fix permission and Update README for vagrant-puppet recipe
-    - [BIGTOP-1311] - allow easy site publishing
-    - [BIGTOP-1312] - Add dynamic menus, update, links, modern CSS to bigtop site.
-    - [BIGTOP-1314] - Deprecate make build
-    - [BIGTOP-1316] - enhance Shell for better checking of return code & output logging
-    - [BIGTOP-1321] - TestCLI: check assumptions before the test run
-    - [BIGTOP-1342] - Make TestCLI usable for both HDFS and HCFS
-    - [BIGTOP-1347] - Support better entropy performance on vagrant VMs
-    - [BIGTOP-1348] - vagrant-puppet provisioning is too slow on installing packages
-    - [BIGTOP-1372] - Bigtop needs feature that takes in multiple arguments to build specific components at command line
-    - [BIGTOP-1374] - Add dynamic override of BOM variables into Gradle build
-    - [BIGTOP-1391] - Improve README.md so that the markdown rendering is pretty and easy to read
-
-- NEW FEATURE
-    - [BIGTOP-1039] - Add packaging for Shark analytics 
-    - [BIGTOP-1089] - BigPetStore: A polyglot big data processing blueprint
-    - [BIGTOP-1188] - Smoke tests for HttpFs
-    - [BIGTOP-1192] - Add utilities to facilitate cluster failure testing into bigtop-test-framework
-    - [BIGTOP-1212] - Pick or build a framework for building fake data sets
-    - [BIGTOP-1272] - BigPetStore: Productionize the Mahout recommender
-    - [BIGTOP-1351] - Init script for HBase Thrift2 server is missing
-
-- TASK
-    - [BIGTOP-236] - The user installed by hbase gets an incorrect home directory
-    - [BIGTOP-1240] - Formatting Guidelines
-
-- TEST
-    - [BIGTOP-1040] - Add integration tests for Shark component
-
-
-* Release 0.7.0 (2013-10-28)
-
-- SUB-TASK
-  - [BIGTOP-932] - Allow puppet recipes to automatically pull in needed files from web
-  - [BIGTOP-964] - Bump hive version to 0.11
-  - [BIGTOP-993] - Add packaging for Phoenix
-  - [BIGTOP-1053] - Bump version of Hadoop to 2.0.6-alpha
-  - [BIGTOP-1073] - Bump version of HBase to 0.94.11
-  - [BIGTOP-1074] - Bump version of Hue to 2.5.0
-  - [BIGTOP-1075] - bump version of Flume to 1.4.0
-  - [BIGTOP-1076] - bump version of Crunch to 0.7.0
-  - [BIGTOP-1077] - Bump version of Hive to 0.11.0
-  - [BIGTOP-1078] - bump version of Datafu to 1.0.0
-  - [BIGTOP-1079] - bump version of Solr to 4.5.0
-  - [BIGTOP-1083] - Spark version was changed with incubating process.
-  - [BIGTOP-1101] - Bump version of HBase to 0.94.12
-
-- BUG
-  - [BIGTOP-990] - /usr/lib/bigtop-utils/ should be owned by bigtop-utils package
-  - [BIGTOP-998] - Consolidate Hue packages
-  - [BIGTOP-1005] - Create versionless symlinks in hadoop client directory
-  - [BIGTOP-1006] - Explicitly state the working directory to be used in init scripts instead of relying on user's home directories
-  - [BIGTOP-1008] - /usr/lib/bigtop-utils directory is not owned by bigtop-utils package in RPMs
-  - [BIGTOP-1010] - libsnappy not included in the final hadoop RPM
-  - [BIGTOP-1011] - bigtop-detect-javahome has a quirky search order
-  - [BIGTOP-1017] - Revert a commit that introduces some unneeded dependencies
-  - [BIGTOP-1027] - 'bigtop-detect-classpath' helper script missing from bigtop-utils
-  - [BIGTOP-1033] - itest-common: fix debian install to avoid conffile prompt
-  - [BIGTOP-1034] - Custom user's home directory's creation fails in init-hdfs.sh
-  - [BIGTOP-1036] - Hue's oozie app package should depend on hue jobsub package
-  - [BIGTOP-1037] - Provide a mechanism to control the sourcing of defaults files
-  - [BIGTOP-1044] - hive and hive-jdbc versions should be kept in sync
-  - [BIGTOP-1045] - Be consistent with shells, bigtop-detect-javahome, and bigtop-utils versions
-  - [BIGTOP-1046] - Pig's use of BIGTOP_DEFAULTS_DIR is evaluated at the wrong time
-  - [BIGTOP-1051] - Spark archive should come in tar.gz format
-  - [BIGTOP-1052] - Increase environment configurability/debugging of Mahout Tests
-  - [BIGTOP-1055] - Spark package doesn't create PREFIX/bin directory and sets wrong permissions
-  - [BIGTOP-1059] - Add path option for pig.jar to pig smoke pom.xml
-  - [BIGTOP-1070] - Add BIGTOP_CLASSPATH functionality to tomcat deployment
-  - [BIGTOP-1071] - bigtop-detect-javahome should select the first possible candidate in the list
-  - [BIGTOP-1081] - deb spark packing needs to set SOURCE_DIR
-  - [BIGTOP-1082] - spark package tests are missing
-  - [BIGTOP-1084] - spark build on CentOS 5 and SLES11 is broken
-  - [BIGTOP-1086] - fix miscellaneous failures in package tests
-  - [BIGTOP-1087] - flume rpm package needs to own /usr/lib/flume/tools
-  - [BIGTOP-1088] - oozie packages need to own /etc/oozie/conf.dist/tomcat-deployment.{default,secure}
-  - [BIGTOP-1090] - hue packaging needs to handle mutable bits (app.reg and hue.pth)
-  - [BIGTOP-1094] - spark SPEC mistakenly re-defines alternatives command on SLES/OpenSUSE 
-  - [BIGTOP-1095] - oozie  packaging doesn't remove tomcat alternatives
-  - [BIGTOP-1096] - alternatives within the alternatives-managed sub-directory could be harmful
-  - [BIGTOP-1098] - now that Spark has had its first incubator release we have to adjust bigtop accordingly
-  - [BIGTOP-1099] - sync up the versions used for smoke tests with the BOM versions
-  - [BIGTOP-1100] - the new Tomcat deployment style would benefit from a few usability tweaks
-  - [BIGTOP-1103] - Remaining fixes of BIGTOP_DEFAULTS_DIR evaluation
-  - [BIGTOP-1107] - Update to the official Hue 2.5.1 release
-  - [BIGTOP-1108] - Solr 4.5.0 has been formally released and we should start using the official release bits
-  - [BIGTOP-1112] - sqoop-client should require bigtop-utils
-  - [BIGTOP-1114] - Hue metastore app is missing from core registration and Debian packages
-  - [BIGTOP-1115] - update package test manifest for Bigtop 0.7.0
-  - [BIGTOP-1117] - hive-hcatalog integration has a few issues
-  - [BIGTOP-1119] - phoenix DEB packages differ in a few ways from RPM counterparts
-
-- IMPROVEMENT
-  - [BIGTOP-927] - Update puppet recipes to support Ubuntu
-  - [BIGTOP-938] - Upgrade to Junit 4.11
-  - [BIGTOP-955] - HBase installation should advertise its location and configuration
-  - [BIGTOP-1004] - Hadoop packages do not include version independent symlinks
-  - [BIGTOP-1021] - Update HBase version to 0.94.9
-  - [BIGTOP-1031] - README has outdated/ambiguous information
-  - [BIGTOP-1041] - Add Puppet support for Spark cluster deployment
-  - [BIGTOP-1056] - some changes are introduced into Spark build: needs to be updated here
-  - [BIGTOP-1063] - Add Scala 2.9.3 to bigtop-toolchain
-  - [BIGTOP-1069] - Remove descriptions about JDK5 dependency
-  - [BIGTOP-1080] - Change /usr/bin scripts to be alternatives instead of flat files
-  - [BIGTOP-1106] - update our puppet deployment topology
-
-- NEW FEATURE
-  - [BIGTOP-811] - Add /var/lib/bigtop as a location to install SQL connectors and other plug-ins
-  - [BIGTOP-1023] - Define BOM for 0.7.0 release of Bigtop
-
-- TASK
-  - [BIGTOP-715] - Add Spark packaging
-  - [BIGTOP-939] - Make usage of bigtop-tomcat more dynamic
-
-- TEST
-  - [BIGTOP-719] - Add TestTextSnappy to test hadoop fs -text with snappy compressed files
-  - [BIGTOP-1030] - Develop integration tests for new Spark component
-  - [BIGTOP-1057] - Add TeraGen / TeraSort Benchmarking
-
-* Release 0.6.0 (2013-06-10)
-
-- SUB-TASK
-  - [BIGTOP-239] - HBase init scripts are shut down in the wrong order
-  - [BIGTOP-812] - HttpFS is using Tomcat 6.0.32 which has numerous vulnerabilities
-  - [BIGTOP-813] - add integration test for HCatalog
-  - [BIGTOP-814] - add packaging test for HCatalog
-  - [BIGTOP-822] - Bump version of Hadoop to 2.0.5
-  - [BIGTOP-824] - Bump version of Hive to 0.10
-  - [BIGTOP-832] - Bump version of Hue to 2.3.0
-  - [BIGTOP-833] - Update HCatalog packaging for Bigtop 0.6.0 integration
-  - [BIGTOP-839] - add packaging test for Sqoop 2
-  - [BIGTOP-852] - Improve HDFS helper script
-  - [BIGTOP-854] - add package testing manifest for HCatalog
-  - [BIGTOP-855] - add integration tests for HCatalog
-  - [BIGTOP-861] - init-hdfs.sh does not work from init scripts
-  - [BIGTOP-862] - All the Apache Hadoop services ought to start nicely and in order
-  - [BIGTOP-863] - verify the existence of the initscripts with typo
-  - [BIGTOP-864] -  update the appliance for the location of the init-hdfs.sh script
-  - [BIGTOP-880] - Bump version of Datafu to 0.0.6
-  - [BIGTOP-891] - TestPigTest doesn't run against YARN cluster
-  - [BIGTOP-915] - bump version of Oozie to 3.3.2
-  - [BIGTOP-916] - Bump version of Pig to 0.11.1
-  - [BIGTOP-917] - Bump version of Solr to 4.2.1
-  - [BIGTOP-926] - Bump version of Giraph to 1.0
-  - [BIGTOP-933] - Need to add ASF license text to bigtop-toolchain files
-  - [BIGTOP-962] - Bump Sqoop version to 1.99.2
-  - [BIGTOP-963] - bump version of Whirr to 0.8.2
-  - [BIGTOP-980] - bigtop-toolchain user module can't create 'jenkins' user
-
-- BUG FIXES
-  - [BIGTOP-275] - scoping of variable in puppet code is incorrect
-  - [BIGTOP-379] - Package testing for Bigtop 0.5.0 release
-  - [BIGTOP-445] - yarn default file has the wrong name
-  - [BIGTOP-463] - should we reconsider /usr/lib vs. /usr/libexec decision?
-  - [BIGTOP-483] - Smoke test of Hadoop fails with clitest missing testConf.xml file
-  - [BIGTOP-489] - TestCLI with two test cases fails ID - 146 and 147 related to refreshServiceAcl
-  - [BIGTOP-504] - provide a functionality for detecting libjvm.so in bigtop-utils
-  - [BIGTOP-545] - package testing manifest in trunk needs to be updated
-  - [BIGTOP-553] - Metadata for packages needs to be harmonized between RPM and Debian
-  - [BIGTOP-584] - Fix hive smoke test failures in trunk
-  - [BIGTOP-624] - running start on a service already running should return success
-  - [BIGTOP-637] - Update boxgrinder appliance for the coming release
-  - [BIGTOP-786] - Not able to find wiki link on Bigtop homepage
-  - [BIGTOP-804] - org.apache.bigtop.itest.pmanager.PackageManagerTest.testLookupGcc fails on some systems
-  - [BIGTOP-808] - hadoop-conf-pseudo package breaks hadoop-conf alternatives during upgrades
-  - [BIGTOP-809] - Remove harmless scary message from solr build
-  - [BIGTOP-815] - Move the trunk forward now that 0.5.0 is released
-  - [BIGTOP-818] - Updated list of supported platforms on Bigtop's homepage and readme file
-  - [BIGTOP-823] - Remove obsolete slf4j removal in hive packaging
-  - [BIGTOP-826] - Wrong return code returned when stopping Hue on RHEL/SUSE
-  - [BIGTOP-827] - Ubuntu Install Instructions Fail with a 404
-  - [BIGTOP-829] - Hue status returned wrong information on SLES
-  - [BIGTOP-830] - Add $(APACHE_MIRROR) for bigtop-tomcat
-  - [BIGTOP-834] - Remove workaround for MAPREDUCE-4814 in historyserver
-  - [BIGTOP-835] - The shell exec method must have variants which have timeout and can run in background
-  - [BIGTOP-836] - Hue status on SuSE doesn't work when run from outside of /usr/bin
-  - [BIGTOP-837] - Oozie 3.3.0 can't be build against Hadoop 2.0.3 or later
-  - [BIGTOP-841] - hadoop-conf-pseudo missed configuration for capacity scheduler and historyserver
-  - [BIGTOP-842] - hadoop-mapreduce-historyserver service dumps some output to the console instead of its .out file 
-  - [BIGTOP-844] - hadoop rpm upgrade sequence is broken
-  - [BIGTOP-846] - The helper script introduced in BIGTOP-547 doesn't create /user/$USER directories on HDFS
-  - [BIGTOP-850] - Remove non-ASF repositories and infrastructure references from the project files
-  - [BIGTOP-851] - Typo breaks detection of multiple libraries in bigtop-utils in some JVMs
-  - [BIGTOP-853] - HBase test, TestLoadAndVerify does not work on secure clusters
-  - [BIGTOP-857] - Inconsistent group ownership of /var/run/hadoop-hdfs between .deb and .rpm installs
-  - [BIGTOP-858] - HBase tests are no longer compile with HBase 0.94
-  - [BIGTOP-859] - fix the Bigtop trunk package test
-  - [BIGTOP-869] - hadoop preun should not stop services itself on behalf of its subpackages
-  - [BIGTOP-870] - Bump version of Pig to 0.11
-  - [BIGTOP-873] - Hive needs Ivy cache cleared
-  - [BIGTOP-874] - make test contracts show up in javadoc
-  - [BIGTOP-875] - hadoop-smoke is broken because of missed dependency
-  - [BIGTOP-876] - TestDFSAdmin in hadoop-smoke is outdated and still relies on presence of -upgradeProgress
-  - [BIGTOP-877] - TestDFSAdmin in hadoop-smoke has to ensure its own environment, e.g. create HDFS paths, etc.
-  - [BIGTOP-878] - Bump version of Oozie to 3.3.1
-  - [BIGTOP-879] - Bump version of Solr to 4.2.0
-  - [BIGTOP-883] - Add support for Oracle RPM Java 1.7
-  - [BIGTOP-884] - hive smoke tests can't be executed as a jar file isn't present anymore
-  - [BIGTOP-885] - TestHiveSmokeBulk fails on Hive 0.9
-  - [BIGTOP-886] - Non-fatal errors when starting Hive daemons in EL5
-  - [BIGTOP-887] - Sqoop 2 should be consistent with Oozie / Bigtop for server plugins directory
-  - [BIGTOP-888] - Oozie losing value of JAVA_HOME when spawning sub-shell
-  - [BIGTOP-889] - pig smoke tests are no longer running
-  - [BIGTOP-890] - Remove workaround for distributed cache problem from Oozie package
-  - [BIGTOP-892] - hadoop packages misses capacity-scheduler configuration
-  - [BIGTOP-894] - Pig compilation fails on RPM systems on Bigtop trunk
-  - [BIGTOP-895] - A number of testcases in TestCLI are failing with (at least) Hadoop 2.0.3 and later
-  - [BIGTOP-896] - Solr rpm requires redhat-lsb to run
-  - [BIGTOP-899] - enabled deployment topology with 3 nodes
-  - [BIGTOP-900] - update deployment configs for Kerberized installations
-  - [BIGTOP-901] - Oozie smoke tests mightn't be able to obtain the jobtracker or namenode hostnames, using old config. names 
-  - [BIGTOP-902] - Hive uses same file for log4j and standard output
-  - [BIGTOP-903] - crunch integration tests need to clean up output directory before each individual subtask is run
-  - [BIGTOP-904] - mahout tests should default to /tmp as a temp location
-  - [BIGTOP-905] - bump versions of test execution poms to match the BOM
-  - [BIGTOP-906] - a few tests need artifacts compiled against Hadoop 2.X
-  - [BIGTOP-907] -  hadoop-mapreduce-historyserver should require hadoop-hdfs
-  - [BIGTOP-910] - datanode init script cannot stop the process because it cannot find the pid file
-  - [BIGTOP-912] - update HBase tests for 0.6.0
-  - [BIGTOP-914] - puppet needs to provision test users
-  - [BIGTOP-918] - Move BOM version info to the root pom file
-  - [BIGTOP-920] - Add HCatalog tests
-  - [BIGTOP-923] - Packages that depend on bigtop-utils needs to specify >= 0.6
-  - [BIGTOP-928] - Hive can't be build against non-published Hadoop artifacts
-  - [BIGTOP-929] - Pig component build needs to use internal resolvers
-  - [BIGTOP-930] - init-hdfs.sh fails on execution
-  - [BIGTOP-934] - Hidden cookie is being set by wiki.cloudera.org via http://blogs.apache.org/bigtop top page
-  - [BIGTOP-935] - Use init script templates for hive
-  - [BIGTOP-936] - Use init script templates for hbase
-  - [BIGTOP-937] - Hive does not restart on Ubuntu
-  - [BIGTOP-940] - fix issues with Hive package
-  - [BIGTOP-945] - Service lock files need to match init script name
-  - [BIGTOP-946] - solr-server runlevels are wrong in debian
-  - [BIGTOP-947] - oozie init script does not use JAVA_HOME in /etc/default/bigtop-utils
-  - [BIGTOP-948] - TestFileAppend and TestHDFSQuota tests need to be updated
-  - [BIGTOP-949] - Add Sqoop tests
-  - [BIGTOP-950] - race condition for output consumption in Shell code
-  - [BIGTOP-951] - Hive Smoke's TestJdbcDriver fails
-  - [BIGTOP-953] - Revert BIGTOP-835 and BIGTOP-950
-  - [BIGTOP-954] - metadata and user data for sqoop and hcatalog packages are wrong
-  - [BIGTOP-956] - fix small issues with mahout, hue and oozie tests
-  - [BIGTOP-957] - update deployment code for Sqoop2
-  - [BIGTOP-958] - update deployment code for HCatalog
-  - [BIGTOP-959] - get rid of hadoop specific init.d.tmpl
-  - [BIGTOP-960] - Remove workaround for HBASE-6263 in hbase-thrift start
-  - [BIGTOP-968] - oozie is now part of the core hue
-  - [BIGTOP-969] - a few files belong to more than one package
-  - [BIGTOP-970] - update package test manifest and fix small issues in package metadata for Bigtop 0.6.0
-  - [BIGTOP-971] - take care of the rat licensing complaints
-  - [BIGTOP-974] - Crunch download location is no longer /incubator
-  - [BIGTOP-975] - HBase regionserver init script does not implement "condrestart" command
-  - [BIGTOP-981] - Provide minimal documentation for puppet deploy config file
-  - [BIGTOP-982] - hcatalog needs to be given HCAT_HOME and HIVE_HOME
-  - [BIGTOP-983] - Oozie-client package ships its docs twice
-  - [BIGTOP-984] - bigtop-utils doesn't detect JAVA_HOME for Oracle JDK 7 when package by Debian's tool
-  - [BIGTOP-985] - Create Hive JDBC package
-  - [BIGTOP-988] - HCatalog does not launch daemon process in a known directory
-  - [BIGTOP-991] - crunch archive is incorrect
-  - [BIGTOP-994] - hdfs-init missed a few permissions on the sub-directories
-  - [BIGTOP-997] - TestCLI fails test id 5:  comparator fails
-  - [BIGTOP-1001] - need package tests for hive-jdbc package
-  - [BIGTOP-1002] - Complete porting Sqoop import tests to Sqoop 2
-
-- IMPROVEMENT
-  - [BIGTOP-12] - Add HCatalog to Bigtop
-  - [BIGTOP-547] - create an HDFS setup helper script
-  - [BIGTOP-712] - Bigtop 0.5.0 release
-  - [BIGTOP-713] - use newer debhelper and source format 3.0 (quilt) for Debian and Ubuntu packaging
-  - [BIGTOP-756] - Provide the thrift API interface file in the HBase packages
-  - [BIGTOP-763] - Add hdfs-journalnode to hadoop.spec
-  - [BIGTOP-817] - Wiki breadcrumb still shows Bigtop(incubating)
-  - [BIGTOP-821] - Puppet README's Usage section cuts off because of angle bracket delimiter
-  - [BIGTOP-838] - Both conf.empty and conf.pseudo packages don't seem to include a sample hadoop-env.sh file
-  - [BIGTOP-843] - It would be really cool if bigtop-detect-javahome were smarter and didn't pick a 32-bit JVM, or a 1.7 JVM set with default
-  - [BIGTOP-882] - Upload content of Oozie sharelib to HDFS
-  - [BIGTOP-893] - migrate puppet code to use init-hdfs.sh
-  - [BIGTOP-931] - a few improvements to secure puppet deployment code
-  - [BIGTOP-961] - hcatalog should be re-using hive user when running
-  - [BIGTOP-967] - package removal is currently missing from our package tests
-  - [BIGTOP-973] - Improve error diagnostics when sed'ing Hadoop version in Oozie package
-  - [BIGTOP-995] - JDK bug 6979329 requires a redundant kinit -R
-  - [BIGTOP-1000] - Remove remaining references to the Incubator
-
-- NEW FEATURE
-  - [BIGTOP-732] - Support running multiple HBase region servers
-  - [BIGTOP-881] - Add support for Oozie to be configured with SSL (HTTPS)
-
-- TASK
-  - [BIGTOP-118] - Check to ensure our build system requirements are actually declared properly in the packaging.
-  - [BIGTOP-681] - Package tests needed for new DataFu package
-  - [BIGTOP-720] - Build environment automation
-  - [BIGTOP-760] - Push artifacts from non-Maven projects into local Maven cache
-  - [BIGTOP-805] - Add Sqoop 2 to bigtop
-
-- TEST
-  - [BIGTOP-621] - Add test for distcp - intra cluster
-  - [BIGTOP-625] - Add HDFS Append Test
-  - [BIGTOP-693] - Add hadoop-fuse-dfs test
-  - [BIGTOP-728] - add datafu integration test
-
-* Release 0.5.0 (2012-12-25)
-
-- SUB-TASK
-  - [BIGTOP-39] - Include Apache Avro in Bigtop
-  - [BIGTOP-41] - Add Apache Thrift to Bigtop
-  - [BIGTOP-65] - Package Apache Forrest
-  - [BIGTOP-225] - Exit consistently in init scripts for stop on stopped service
-  - [BIGTOP-232] - create an example .ks file for Bigtop
-  - [BIGTOP-237] - Oozie init scripts are way too verbose
-  - [BIGTOP-239] - HBase init scripts are shut down in the wrong order
-  - [BIGTOP-240] - hbase initscripts should be more explicit
-  - [BIGTOP-302] - 1. Start a separate Wiki page on Installing Bigtop in VMs (VirtualBox) (make sure to record the fact that one needs >> 2GB of RAM)    2. Record the experience with 32bit OSes 
-  - [BIGTOP-380] - Initscripts have misleading messages
-  - [BIGTOP-533] - BigTop Hama Integration Tests
-  - [BIGTOP-551] - Updates for packaging manifest
-  - [BIGTOP-612] - Add Crunch to Bigtop
-  - [BIGTOP-692] - Bump Whirr to 0.8.1
-  - [BIGTOP-695] - Make variables used in flume build/install scripts consistent
-  - [BIGTOP-696] - Make variables used in oozie build/install scripts consistent
-  - [BIGTOP-697] - Make variables used in zookeeper build/install scripts consistent
-  - [BIGTOP-700] - Make variables used in whirr build/install scripts consistent
-  - [BIGTOP-702] - Make variables used in datafu build/install scripts consistent
-  - [BIGTOP-703] - Make variables used in jsvc build/install scripts consistent
-  - [BIGTOP-704] - Make variables used in pig build/install scripts consistent
-  - [BIGTOP-705] - Make variables used in giraph build/install scripts consistent
-  - [BIGTOP-706] - Make variables used in hive build/install scripts consistent
-  - [BIGTOP-707] - Make variables used in tomcat build/install scripts consistent
-  - [BIGTOP-711] - update hadoop version to 2.0.2-alpha 
-  - [BIGTOP-714] - HDFS Fuse build broken for 0.23.3
-  - [BIGTOP-733] - Bump Hue to 2.1.0
-  - [BIGTOP-735] - Upgrade to pull Solr 4.0 when it's officially released
-  - [BIGTOP-736] - When Solr is integrated, we'll need some tests.
-  - [BIGTOP-744] - add package test for Solr
-  - [BIGTOP-750] - Bump Zookeeper to 3.4.5
-  - [BIGTOP-751] - Bump HBase to 0.94.2
-  - [BIGTOP-752] - Bump Sqoop to 1.4.2
-  - [BIGTOP-753] - Bump Flume to 1.3.0
-  - [BIGTOP-765] - Create deployment code for Solr
-  - [BIGTOP-791] - Bump Oozie to 3.3.0
-
-- BUG
-  - [BIGTOP-51] - Layout of Bigtop repository can be optimized
-  - [BIGTOP-79] - Audit RPM package dependencies for programs invoked from install/uninstall scripts
-  - [BIGTOP-98] - Ability to force ivy/maven version inter-dependency needs to be implemented
-  - [BIGTOP-105] - hadoop services can not be started with the default configs in place
-  - [BIGTOP-106] - review and cleanup install/uninstall/upgrade time dependencies
-  - [BIGTOP-107] - make Bigtop packaging work in kick-start environment
-  - [BIGTOP-135] - make jsvc use detection more robust on SLES
-  - [BIGTOP-168] - all of the packages that link to hadoop-core need to also link commons-configuration-1.6.jar
-  - [BIGTOP-172] - Hadoop init.d scripts are flaky on stop
-  - [BIGTOP-177] - Remove or fix versioned dependencies between packages of Bigtop
-  - [BIGTOP-187] - some packages ship log files
-  - [BIGTOP-200] - /var/lib/hadoop is set to 777 on installation it should just be group writable
-  - [BIGTOP-213] - We should use another group than hadoop for system groups
-  - [BIGTOP-218] - we need to figure out which compressors/codecs need to be pulled into Bigtop
-  - [BIGTOP-219] - Pig/HBase integration is not working
-  - [BIGTOP-222] - some man pages in /usr/share/doc/flume
-  - [BIGTOP-223] -  install/ initscripts fixes 
-  - [BIGTOP-224] - Some configuration files have their permissions set to 755
-  - [BIGTOP-226] - everything in /usr/lib/pig is set to 755. Non executable files should just be 644
-  - [BIGTOP-227] - whirr has all its files (except its man page) set as 755
-  - [BIGTOP-228] - Whirr should have a symlink to the hadoop's jars it needs
-  - [BIGTOP-229] - pig package install an alternative with an inconsistent name
-  - [BIGTOP-230] - removing packages displays the following extraneous messages
-  - [BIGTOP-256] - Lintian Errors/Warnings - Hadoop Package
-  - [BIGTOP-275] - scoping of variable in puppet code is incorrect
-  - [BIGTOP-279] - we shouldn't be using properties that have been deprecated in config/puppet
-  - [BIGTOP-282] - the licensing status of the MovieLens data files needs to be cleared up
-  - [BIGTOP-292] - BIgtop SRPMs can not be rebuilt
-  - [BIGTOP-307] - README documentation is inaccurate
-  - [BIGTOP-311] - hadoop does not build in off-line mode
-  - [BIGTOP-326] - puppet modules need to be renamed to align with renaming that happens on the package front
-  - [BIGTOP-330] - hadoop 0.23 pseudo conf needs to set more properties to avoid using /tmp as its datadir
-  - [BIGTOP-335] - sqoop-metastore service fails to start because it ends up using /usr/lib/hadoop/bin/hadoop
-  - [BIGTOP-340] - wrapper scripts would benefit from being explicit
-  - [BIGTOP-347] - we need to get rid of workaround for HDFS-1943 in hadoop init.d scripts
-  - [BIGTOP-358] - now that hadoop packages have been split we have to update the dependencies on the downstream packages
-  - [BIGTOP-368] - /usr/bin/hadoop sources /etc/default/yarn unconditionally
-  - [BIGTOP-377] - Clean up packages
-  - [BIGTOP-379] - Package testing for Bigtop 0.5.0 release
-  - [BIGTOP-385] - new artifacts in Hadoop 0.23 need to be properly packaged
-  - [BIGTOP-395] - flume-ng init.d scripts seem to be unhappy on Debian
-  - [BIGTOP-425] - HMaster dies from RemoteException: Server IPC version 5 cannot communicate with client version 4
-  - [BIGTOP-436] - flume-node stop seems to mistarget some other java process on lucid
-  - [BIGTOP-445] - yarn default file has the wrong name
-  - [BIGTOP-463] - should we reconsider /usr/lib vs. /usr/libexec decision?
-  - [BIGTOP-471] - hive server setup after installation fails on creating /var/lock/subsys directory
-  - [BIGTOP-483] - Smoke test of Hadoop fails with clitest missing testConf.xml file
-  - [BIGTOP-486] - separate principals from keytabs in our kerberos module
-  - [BIGTOP-489] - TestCLI with two test cases fails ID - 146 and 147 related to refreshServiceAcl
-  - [BIGTOP-490] - SmokeTest-Mahout test failures
-  - [BIGTOP-492] - make our launcher scripts recognize cascading defaults
-  - [BIGTOP-504] - provide a functionality for detecting libjvm.so in bigtop-utils
-  - [BIGTOP-511] - Add debian repo to bigtop puppet
-  - [BIGTOP-520] - HBase Debian builds run do-component-build twice
-  - [BIGTOP-525] - make sure yarn.dispatcher.exit-on-error is set to true
-  - [BIGTOP-531] - package metadata check error: when description is empty, the summary will append text "description" at the end of summary
-  - [BIGTOP-542] - Issue when installing the new zkfc
-  - [BIGTOP-545] - package testing manifest in trunk needs to be updated
-  - [BIGTOP-549] - update puppet code for the latest state of HA in Hadoop
-  - [BIGTOP-553] - Metadata for packages needs to be harmonized between RPM and Debian
-  - [BIGTOP-566] - Flume NG pkg init script should allow user to customize the location of the conf dir
-  - [BIGTOP-581] - Need to move iTest website to a correct location
-  - [BIGTOP-584] - Fix hive smoke test failures in trunk
-  - [BIGTOP-591] - libhdfs0-dev is missing files
-  - [BIGTOP-615] - Add support for Ambrose Pig job visualization
-  - [BIGTOP-624] - running start on a service already running should return success
-  - [BIGTOP-634] - Weird output when doing an init action on oozie when it is already running
-  - [BIGTOP-637] - Update boxgrinder appliance for the coming release
-  - [BIGTOP-644] - fix the runlevels for a few packages in yum/apt/zypper
-  - [BIGTOP-648] - hbase-thrift cannot be started properly
-  - [BIGTOP-654] - Set nproc for HDFS user
-  - [BIGTOP-658] - Move hbase dependency in hive out of the main package
-  - [BIGTOP-662] - cannot start hue-server
-  - [BIGTOP-663] - cannot start sqoop-metastore in Ubuntu precise
-  - [BIGTOP-664] - hive-metastore script has error when execute 'service hive-metastore status' in Ubuntu precise
-  - [BIGTOP-667] - start hadoop-httpfs in SLES return 3 instead of 0
-  - [BIGTOP-669] - Add DataFu to Bigtop distribution
-  - [BIGTOP-676] - Update HBase version to 0.94.1-rc0
-  - [BIGTOP-679] - fuse_dfs binary has moved due to HDFS-3537
-  - [BIGTOP-684] - copyright header in bigtop-packages/src/common/hadoop/conf.secure/core-site.xml is in wrong place
-  - [BIGTOP-686] - Apache Flume 1.2.0 went out so we should upgrade to it
-  - [BIGTOP-689] - Oozie use of /var/tmp in some CentOS versions create issues
-  - [BIGTOP-691] - flume gets killed too fast when the service is asked to stop
-  - [BIGTOP-694] - Some HBase Scripts Need To Be Excluded From Packaging
-  - [BIGTOP-699] - hadoop-fuse-dfs fails to find libjvm.so when JAVA_HOME happens to be a symlink
-  - [BIGTOP-701] - Bigtop Hadoop package does not include bash tab completion
-  - [BIGTOP-708] - modify Hive integration test to depends on the correct jar file
-  - [BIGTOP-721] - improve the package daemon status check, check twice by some delay if status doesn't match expected value
-  - [BIGTOP-725] - package service test TestServices should only do config once for a group of services
-  - [BIGTOP-729] - zookeeper initd script status doesn't work
-  - [BIGTOP-734] - Proposed fixes to Hive logging configuration
-  - [BIGTOP-740] - improve the package file content tests to ignore platform specific file names
-  - [BIGTOP-745] - fix incorrect javadocs in test framework
-  - [BIGTOP-754] - libhdfs0-dev package is empty
-  - [BIGTOP-755] - YARN_HOME is now HADOOP_YARN_HOME
-  - [BIGTOP-758] - [Pig] Override hadoop-core and hadoop-test artifact versions only if Hadoop < 2.x
-  - [BIGTOP-759] - Flume do-component-build should set hbase.version according to BOM
-  - [BIGTOP-762] - Updates for building Flume versions >= 1.2
-  - [BIGTOP-766] - Make it possible to override Apache URLs for the builds that require them
-  - [BIGTOP-768] - Bump the version of Groovy to 1.8
-  - [BIGTOP-774] - solr build fails on Debian
-  - [BIGTOP-775] - In Groovy 1.8 the behavior of a getters of static fields changed slightly
-  - [BIGTOP-776] - A few updates to Oozie related packaging code to accomodate Oozie's TLP status
-  - [BIGTOP-778] - move Solr app to under /solr in Tomcat deployment and provide more knobs
-  - [BIGTOP-780] - cannot install hive-server in SLES
-  - [BIGTOP-781] - Bigtop packages seem to have a bunch of parasitic provides
-  - [BIGTOP-782] - 'service hue status' still show 'failed' after hue is started in SLES
-  - [BIGTOP-783] - 'service hue stop' cannot stop the hue service in SLES
-  - [BIGTOP-785] - clean up build-time dependencies in our packages
-  - [BIGTOP-788] - Oozie RPM build error due to lack of usr/lib/oozie/{libserver,oozie-server,libext}
-  - [BIGTOP-789] - remove shadow-utils dependency from zookeeper
-  - [BIGTOP-793] - add crunch integration tests
-  - [BIGTOP-794] - puppet code around hadoop_head_node breaks on newer puppets
-  - [BIGTOP-795] - need to abstract away the notion of Bigtop puppet repo
-  - [BIGTOP-796] - hive install tries to tickle HDFS and it shouldn't
-  - [BIGTOP-797] - provide a way to 'rsync' files into HDFS during puppet deployment
-  - [BIGTOP-798] - introduce a fatjar collection of all the Bigtop integration tests and all their dependencies
-  - [BIGTOP-800] - BIGTOP_JSVC is lacking ARCHIVE designation
-  - [BIGTOP-801] - prioritize jdk over jre in bigtop-utils javadetection
-  - [BIGTOP-803] - somehow puppet gets confused by the hadoop fs output
-
-- IMPROVEMENT
-  - [BIGTOP-12] - Add HCatalog to Bigtop
-  - [BIGTOP-22] - Unify init scripts
-  - [BIGTOP-58] - Add man page for Mahout
-  - [BIGTOP-60] - Add normal rpmbuild options for make rpm
-  - [BIGTOP-78] - Misc improvements 
-  - [BIGTOP-117] - Document incompatible API changes between releases
-  - [BIGTOP-153] - Build should detect invalid archives in the dl directory
-  - [BIGTOP-204] - Add information about supported projects and criteria for support to Apache Bigtop website
-  - [BIGTOP-233] - Ubuntu integration: ufw application profiles
-  - [BIGTOP-245] - Improve Bigtop layout
-  - [BIGTOP-263] - we need a way to provide supervision for long running daemon applications
-  - [BIGTOP-284] - It would be nice to integrate Nutch crawling as one of the MR (and perhaps HBase) tests for Bigtop
-  - [BIGTOP-304] - document test creation and test execution steps in Bigtop
-  - [BIGTOP-351] - Make HBASE_HEAPSIZE manageable via puppet
-  - [BIGTOP-375] - Publish a table of component dependency versions
-  - [BIGTOP-410] - Build secure version of HBase
-  - [BIGTOP-433] - Make variables used in build/install scripts consistent
-  - [BIGTOP-451] - ooziedb.sh script should run as the unix user that runs oozie server
-  - [BIGTOP-452] - oozie installation should offer to run ooziedb.sh
-  - [BIGTOP-456] - Consider splitting homedir between mapred and hdfs users?
-  - [BIGTOP-476] - Improvement of BigTop iTest framework
-  - [BIGTOP-497] - add bash tab completion to our hadoop packages
-  - [BIGTOP-517] - Flume packaging should use the term "agent" instead of "node".
-  - [BIGTOP-536] - provide suitable title in the <title> tag of the main home page for bigtop-incubating
-  - [BIGTOP-547] - create an HDFS setup helper script
-  - [BIGTOP-550] - Add ability to have multiple zookeeper servers to puppet
-  - [BIGTOP-571] - we might need to guard against direct use of the upstream launcher scripts
-  - [BIGTOP-575] - [Pseudo-dist config] Should yarn.nodemanager.remote-app-log-dir really be set?
-  - [BIGTOP-576] - Minor discrepancy in naming of logs/users vs. base log directories
-  - [BIGTOP-640] - Sync up dependency library versions automatically
-  - [BIGTOP-670] - provide a link to our Jenkins server from the Bigtop's project page
-  - [BIGTOP-680] - Write util function to capture test input parameters
-  - [BIGTOP-688] - improve hue packaging via making virtual env relocatable and moving DB files into /var/lib/hue
-  - [BIGTOP-710] - Create a higher level orchestration deployment framework
-  - [BIGTOP-712] - Bigtop 0.5.0 release
-  - [BIGTOP-713] - use newer debhelper and source format 3.0 (quilt) for Debian and Ubuntu packaging
-  - [BIGTOP-716] - standardize on bash as a shell of choice
-  - [BIGTOP-718] - Update wiki for component requirements
-  - [BIGTOP-726] - make it possible to run HiveBulkScriptExecutor scripts selectively
-  - [BIGTOP-731] - Reorganize Hadoop tests
-  - [BIGTOP-742] - tighten permissions on hadoop services home directories
-  - [BIGTOP-756] - Provide the thrift API interface file in the HBase packages
-  - [BIGTOP-757] - Install ZooKeeper jars into the local Maven cache
-  - [BIGTOP-769] - Create a generic shell executor iTest driver
-  - [BIGTOP-772] - Remove all references to the incubation from the trunk of Bigtop
-  - [BIGTOP-777] - separate the Package Tests into service tests, file content tests, and dependency tests
-  - [BIGTOP-779] - create a config management utility for SolrCloud
-  - [BIGTOP-784] - test deployment code on all of the supported platforms (using OpenJDK7 where available)
-  - [BIGTOP-787] - Surface all possible options in /etc/default/flume-node
-  - [BIGTOP-792] - org.apache.bigtop.itest.posix.Service needs to be updated to accomodate upstart/systemd
-  - [BIGTOP-799] - create an itest method for unpacking all the resources into the HDFS or local filesystem
-  - [BIGTOP-802] - Add rollback option to DataNode service script
-  - [BIGTOP-806] - now that MAPREDUCE-3916 is fixed we should enable webproxy
-
-- NEW FEATURE
-  - [BIGTOP-42] - Start doing testpatch-like patch validation
-  - [BIGTOP-64] - Package Apache tools used in Bigtop builds
-  - [BIGTOP-101] - Need to come up with a strategy for dealing with soft dependencies between packages
-  - [BIGTOP-103] - add support for upstart service management facility
-  - [BIGTOP-116] - Add support for systemd
-  - [BIGTOP-220] - Add mrunit to Bigtop
-  - [BIGTOP-246] - Provide more verbose output at artifact build time
-  - [BIGTOP-276] - Standardize how clients create classpaths and java.library.paths
-  - [BIGTOP-350] - Some sort of system expert would be useful for manifests reviews
-  - [BIGTOP-399] - Provide a one-click install package for seamless deployment of Bigtop from the release/nightly repos
-  - [BIGTOP-458] - Create some builds/tests jobs for openJDK 6 and 7 on bigtop01 jenkins
-  - [BIGTOP-460] - Add support for daemontools-encore
-  - [BIGTOP-524] - Add tests to verify that our init scripts conform to the LSB specification
-  - [BIGTOP-594] - Introduce git-style bigtop command line and BIGTOP_HOME environment variable
-  - [BIGTOP-601] - introduce bigtop-build packages for packaging build-time dependencies
-  - [BIGTOP-635] - Implement a cluster-abstraction, discovery and manipulation framework for iTest
-  - [BIGTOP-685] - provide a way to specify the parameters expected by a test
-  - [BIGTOP-687] - Support YARN as a subproject of Hadoop
-  - [BIGTOP-730] - add tarball driver into Bigtop
-
-- TASK
-  - [BIGTOP-236] - The user installed by hbase gets an incorrect home directory
-  - [BIGTOP-314] - add ZooKeeper systest to Bigtop testing regime 
-  - [BIGTOP-406] - Enable openSuse VM on bigtop01
-  - [BIGTOP-439] - we need to review package test manifest for the Bigtop 0.4.0 release
-  - [BIGTOP-681] - Package tests needed for new DataFu package
-  - [BIGTOP-715] - Add Spark packaging
-  - [BIGTOP-720] - Build environment automation
-  - [BIGTOP-760] - Push artifacts from non-Maven projects into local Maven cache
-
-- TEST
-  - [BIGTOP-614] - Add HDFS HA tests
-  - [BIGTOP-621] - Add test for distcp - intra cluster
-  - [BIGTOP-625] - Add HDFS Append Test
-  - [BIGTOP-629] - Adding Automated HA Tests 
-  - [BIGTOP-693] - Add hadoop-fuse-dfs test
-  - [BIGTOP-719] - Add TestTextSnappy to test hadoop fs -text with snappy compressed files
-  - [BIGTOP-728] - add datafu integration test
-
-- WISH
-  - [BIGTOP-67] - Resize bigtop logo in header of website
-
-* Release 0.4.0 (2012-06-25)
-
-- SUB-TASK
-  - [BIGTOP-249] - Fix namenode webapp
-  - [BIGTOP-250] - Build contrib modules
-  - [BIGTOP-251] - Making sure security works with the new packages for hadoop 0.23
-  - [BIGTOP-254] - Write init scripts for the new yarn services
-  - [BIGTOP-268] - Examples do not run
-  - [BIGTOP-270] - Need to create versionless jars for integration point with other projects
-  - [BIGTOP-271] - Kill the sbin package for hadoop 0.23
-  - [BIGTOP-299] - Recording the fact that Java needs to be installed *before* installing Bigtop (at least on Ubuntu)
-  - [BIGTOP-300] - Record the information on slow mirrors
-  - [BIGTOP-387] - Zookeeper version needs to be updated to 3.4.3
-  - [BIGTOP-424] - Bump version of Oozie to 3.1.3
-  - [BIGTOP-527] - Integrate Hue 2.0.0 into Bigtop
-  - [BIGTOP-538] - update puppet code for  BIGTOP-535
-  - [BIGTOP-539] - update puppet code for  BIGTOP-534
-  - [BIGTOP-603] - Bump version of Pig to 0.10.0
-  - [BIGTOP-604] - Bump version of Hive to 0.9.0
-  - [BIGTOP-605] - Create How To Release document
-  - [BIGTOP-606] - Bump version of HBase to 0.94.0
-  - [BIGTOP-617] - Bump version of Oozie to 3.2.0
-  - [BIGTOP-628] - bump version of Mahout to 0.7.0
-
-- BUG FIXES
-  - [BIGTOP-96] - Mahout examples try to write to /usr/lib/mahout/examples/bin/work
-  - [BIGTOP-162] - downstream components need to be patched to be compatible with MR2
-  - [BIGTOP-186] - whirr package ships source code and needs some permissions fixes
-  - [BIGTOP-264] - Hadoop 23 upstream tarball put some of its hadoop jars in the same directory as some of the dependencies
-  - [BIGTOP-269] - in .23 HBase need to link to a much wider set of hadoop jars
-  - [BIGTOP-272] - small cleanup for hadoop 0.23
-  - [BIGTOP-274] - yarn-site.xml needs to be addedd to the Hadoop client classpath
-  - [BIGTOP-277] - hive package in hadoop-0.23 branch is hopelessly broken
-  - [BIGTOP-283] - in hadoop-0.23 the sha for Hadoop needs to be bumped to the 23.1-SNAPSHOT cut off point
-  - [BIGTOP-285] - Remove source jars from hadoop-0.23
-  - [BIGTOP-286] - hadoop-0.23: hadoop-mapreduce-examples.jar is empty
-  - [BIGTOP-288] - hadoop-0.23: pig contrib should have jars
-  - [BIGTOP-289] - hadoop-0.23: need to package native libraries for compression
-  - [BIGTOP-291] - __id_u RPM macro seems to be disabled in newer versions of RPMs
-  - [BIGTOP-294] - brp-repack-jars chokes on guice-2.0.jar because of the directories without any read permission
-  - [BIGTOP-308] - hadoop-0.23 branch doesn't provide .so links to hadoop native libraries
-  - [BIGTOP-309] - hadoop-0.23 puppet code needs to deploy history server
-  - [BIGTOP-312] - Puppet resource dependency syntax used in hadoop/manifests/init.pp doesn't always work
-  - [BIGTOP-316] - split up hadoop packages into common, hdfs, mapreduce (and yarn)
-  - [BIGTOP-325] - bump version of Zookeeper to 3.4.2
-  - [BIGTOP-331] - our init scripts fail to detect a failed history server in hadoop 0.23
-  - [BIGTOP-333] - need to start properly packaging httpfs
-  - [BIGTOP-338] - hadoop 0.23 doc package does not contain generated documentation
-  - [BIGTOP-339] - zookeeper init.d script has a typo in PID file location
-  - [BIGTOP-341] -  hadoop streaming is not available in hadoop-0.23 branch
-  - [BIGTOP-344] - puppet code needs to be updated to be able to deploy Hadoop 0.23 YARN in a secure fashion
-  - [BIGTOP-345] - container-executor.conf.dir needs to be set to /etc/hadoop/conf
-  - [BIGTOP-346] - container-executor needs to be owned by root:yarn and have permissions of 6050
-  - [BIGTOP-348] - jsvc location needs to be updated in init.d datanode script
-  - [BIGTOP-349] - jsvc from hadoop build fails on some of the Linux systems that Bigtop has to support
-  - [BIGTOP-355] - missed yarn.nodemanager.local-dirs, yarn.nodemanager.log-dirs and yarn.nodemanager.remote-app-log-dir in our puppet code
-  - [BIGTOP-357] - libjvm.so dependency on CentOS can only be satisfied by libgcj in some cases
-  - [BIGTOP-359] - documentation build is picked from the wrong place
-  - [BIGTOP-367] - Upgrade sequencing issue
-  - [BIGTOP-369] - pig package has license files in the wrong location
-  - [BIGTOP-372] - hive can't be started without setting HADOOP_HOME
-  - [BIGTOP-373] - bigtop-detect-javahome not customizable
-  - [BIGTOP-376] - Remove unused obsolete statement in hadoop packaging
-  - [BIGTOP-378] - history server security configs are missing
-  - [BIGTOP-381] - need to catch up with the HEAD of hadoop-common in hadoop-0.23
-  - [BIGTOP-383] - bigtop-detect-javahome installed at location unexpected by scripts on SLES
-  - [BIGTOP-389] - need to start packaging apache commons daemon jsvc
-  - [BIGTOP-392] - zookeeper package needs to be updated for 3.4 series
-  - [BIGTOP-393] - hadoop packages don't set nproc
-  - [BIGTOP-396] - Missing resource dependencies in puppet for secure clusters
-  - [BIGTOP-397] - hbase, sqoop and oozie don't use alternatives for managing configuration
-  - [BIGTOP-402] - manifests for package testing contain way too many duplicate data
-  - [BIGTOP-407] - hadoop package can't be installed on debian
-  - [BIGTOP-408] - HBase build needs to be worked around in hadoop-0.23 branch
-  - [BIGTOP-409] - sqoop build fails on SLES
-  - [BIGTOP-415] - need to workaround MAPREDUCE-3890
-  - [BIGTOP-416] - run levels for init.d scripts need to be harmonized
-  - [BIGTOP-417] - webapps shipped by bigtop components should be using a system-provided Tomcat
-  - [BIGTOP-418] - Package MAPREDUCE-2858 (MRv2 WebApp Security)
-  - [BIGTOP-420] - bigtop-detect-javahome installed at location unexpected by scripts on SLES
-  - [BIGTOP-421] - Sign every single file from the convenience artefact in the releases
-  - [BIGTOP-422] - need to start packaging fuse for Hadoop 0.23
-  - [BIGTOP-423] - hadoop package needs to be split into hadoop-client and hadoop-server packages
-  - [BIGTOP-426] - build failures in hadoop-0.23 branch
-  - [BIGTOP-428] - sqoop build needs to be forced to depend on HBase 0.92.0 artifacts
-  - [BIGTOP-429] - CATALINA_PID needs to be updated in hadoop-httpfs.default to correspond to the calculated value
-  - [BIGTOP-430] - oozie examples are missing
-  - [BIGTOP-434] - Potential issue in the way Bigtop test infrastructure deal with arch in packages
-  - [BIGTOP-435] - need to split hadoop subpackages even further for hadoop-0.23
-  - [BIGTOP-438] - need to bump the version of hadoop .23 to hadoop-0.23.3-SNAPSHOT
-  - [BIGTOP-443] - deb/oozie/oozie-client.postinst installs an alternative for a path that isn't there
-  - [BIGTOP-448] - Cannot install flume-ng-node pkg
-  - [BIGTOP-449] - hadoop package cleanup for hadoop-0.23
-  - [BIGTOP-450] - repackage Oozie as a true webapp
-  - [BIGTOP-453] - hadoop needs to depend on Zookeeper
-  - [BIGTOP-455] - we still need at HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec to be present in our launcher scripts
-  - [BIGTOP-461] - hadoop-httpfs unconditionally removes the alternative
-  - [BIGTOP-462] - JSVC_HOME location can be both /usr/lib/bigtop-utils and /usr/libexec/bigtop-utils
-  - [BIGTOP-464] - There's currently a commented block in puppet/modules/hadoop/templates/yarn-site.xml that fails to render in some cases
-  - [BIGTOP-467] - [puppet] yarn historyserver currently doesn't depend on the mapreduce keytab as it should
-  - [BIGTOP-468] - [puppet] Fix httpfs dependencies and give it its own keytab 
-  - [BIGTOP-469] - [puppet] web authentication principal missing from hdfs-site.xml
-  - [BIGTOP-472] - make deb fails when building Whirr - OutOfMemoryError: PermGen space
-  - [BIGTOP-473] - zookeeper man page needs to be updated
-  - [BIGTOP-474] - improve Oozie integration with Hadoop 0.23
-  - [BIGTOP-477] - launcher script for pig, hive, sqoop and mahout should not source /etc/default/hadoop anymore
-  - [BIGTOP-478] - perhaps we should go back to mapred as a user name for mapreduce daemons in Hadoop 0.23
-  - [BIGTOP-479] - init.d scripts should provide an option for initializing persistent state of the services that require it
-  - [BIGTOP-480] - log aggregation knob is now yarn.log-aggregation-enable and needs to be enabled
-  - [BIGTOP-481] - fix the way to handle repo key for Debian like system
-  - [BIGTOP-482] - HTTPFS puppet code needs to be updated
-  - [BIGTOP-484] - Fix typo error ('alredy' -> 'already')
-  - [BIGTOP-485] - transition package dependencies to hadoop-client package
-  - [BIGTOP-487] - oozie security configuration needs to be updated
-  - [BIGTOP-488] - TestCLI missing dependency
-  - [BIGTOP-493] - [puppet] core-site.xml and httpfs-site.xml end up changing every few times puppet is run
-  - [BIGTOP-496] - Hive ships some source code in the package.
-  - [BIGTOP-498] - Oozie's home dir is currently set to /var/run/oozie but should be /var/lib/oozie
-  - [BIGTOP-500] - Oozie hive workflows are broken because of the HADOOP_HOME being deprecated
-  - [BIGTOP-501] - it would help to have libext point to /var/lib/oozie in Oozie packaging
-  - [BIGTOP-502] - Flume NG init script should pass --name "agent" instead of --name "<hostname>"
-  - [BIGTOP-503] - remove unsupported upstream launcher scripts from packaging
-  - [BIGTOP-505] - Flume should exit 0 when trying to stop a non-running service
-  - [BIGTOP-506] - Flume should exit 0 when trying to start an already running service
-  - [BIGTOP-507] - sqoop deb package is empty
-  - [BIGTOP-509] - make all is failing because flume-1.0.0-incubating.tar.gz does not exist in APACHE_MIRROR
-  - [BIGTOP-510] - log4j property must be set with property in for Flume NG pkg
-  - [BIGTOP-512] - oozie can't be installed on debian
-  - [BIGTOP-513] - Pig Packaging Cleanup
-  - [BIGTOP-516] - it would be cleaner to get rid of mapreduce entries in limits.d/mapreduce.conf now that we consolidated the user back to mapred
-  - [BIGTOP-519] - /etc/default files are not marked as configs by various hadoop packages
-  - [BIGTOP-521] - Hadoop Pseudo-conf files have wrong permissions
-  - [BIGTOP-526] - it could be useful to have dfs.permissions.enabled set to true in our configs
-  - [BIGTOP-528] - A small patch to give better return codes which can be understood by chef and puppet among others.
-  - [BIGTOP-529] - [puppet] We don't currently format zookeeper nodes on first run
-  - [BIGTOP-530] - [puppet] We currently xst the HTTP principal multiple times, each time invalidating the previous one
-  - [BIGTOP-537] - provide a workaround for non-LSB compliant catalina.sh handling of a stop action
-  - [BIGTOP-540] - hadoop package on debian is missing JSVC_HOME specification
-  - [BIGTOP-541] - Build issue for hadoop RPM
-  - [BIGTOP-543] - fuse_dfs_wrapper.sh should not be provided
-  - [BIGTOP-544] - zookeeper-server init doesn't work
-  - [BIGTOP-546] - Hadoop rpm is missing fuse default file
-  - [BIGTOP-548] - we should consider moving the creation of the hive user into the hive package
-  - [BIGTOP-552] - Enable HDFS durable sync
-  - [BIGTOP-554] - rat is complaining again
-  - [BIGTOP-556] - hbase, yarn and hdfs users are getting incorrect home directories on Debian
-  - [BIGTOP-557] - /etc/oozie/conf and oozie-conf alternative should belong to oozie instead of oozie-client
-  - [BIGTOP-558] - debian description can't have the following set of characters ${VAR}
-  - [BIGTOP-559] - HttpFS fails to load Hadoop configuration after installation. 
-  - [BIGTOP-560] - Flume-ng does not depend on ant or xml-commons anymore
-  - [BIGTOP-561] - HBase packaging has dangling symlinks
-  - [BIGTOP-562] - fix Mahout smoke test in trunk branch
-  - [BIGTOP-563] - Build behind proxy fails
-  - [BIGTOP-564] - Build failure for big-top trunk for pig, oozie, and whirr
-  - [BIGTOP-565] - catalina-based services should be really careful with catalina stop
-  - [BIGTOP-568] - Move the configuration directory for hadoop to /etc/hadoop
-  - [BIGTOP-574] - Add netcat and fuser as dependencies for some of the Bigtop stack. 
-  - [BIGTOP-579] - hadoop-yarn creates log files with inconsistent ownership on Debian/Ubuntu
-  - [BIGTOP-580] - a wrapper for the ZooKeeper cleanup script is missing
-  - [BIGTOP-582] - when running Hadoop daemons pidofproc is mistakenly looking for 'su' instead of 'java'
-  - [BIGTOP-583] - "Class path contains multiple SLF4J bindings" warnings
-  - [BIGTOP-585] - hbase package has dangling symlinks
-  - [BIGTOP-586] - HBase package should provide a versionless symlink to a jar for other projects to depend upon
-  - [BIGTOP-587] - Hive package needs to depend on Zookeeper and HBase
-  - [BIGTOP-588] - Oozie puppet code needs to take care of initializing the DB
-  - [BIGTOP-589] - TestLoadAndVerify's HBase scanner caching setting too high
-  - [BIGTOP-590] - hadoop man page needs to be updated
-  - [BIGTOP-592] - clean up content of conf.empty and conf.pseudo config files
-  - [BIGTOP-593] - improve the package test to handle [arch=xx] in the repo file
-  - [BIGTOP-595] - flume-ng agent should honor /etc/default settings
-  - [BIGTOP-596] - move service configuration from groovy code to package xml manifest file
-  - [BIGTOP-597] - Flume init scripts should not create the log and lock directories.
-  - [BIGTOP-598] - missing zookeeper init on debian
-  - [BIGTOP-599] - /var/lock/subsys may not exist on some distributions and this create some issues in Apache Hadoop
-  - [BIGTOP-600] - hbase init script should not source hbase-config
-  - [BIGTOP-607] - remove python build dependency from Hive package
-  - [BIGTOP-608] - remove source/build artifacts from the Pig package
-  - [BIGTOP-609] - add support for openjdk 1.7
-  - [BIGTOP-616] - Fix TestHadoopSmoke and TestHadoopExamples: tests look for streaming and examples jar in wrong directory
-  - [BIGTOP-626] - small misc. issues with the newly introduced Giraph package
-  - [BIGTOP-630] - Fix services priorities
-  - [BIGTOP-633] - bigtop-utils packages should depend on 'which'
-  - [BIGTOP-636] - a few minor tweaks to the Hue deployment code
-  - [BIGTOP-638] - move <services> block from common package_date.xml to each platform's (yum, apt, zypper) package_data.xml
-  - [BIGTOP-639] - sqoop-metastore cannot stop because "Could not load db driver class: org.hsqldb.jdbcDriver"
-  - [BIGTOP-642] - Suse does not have a package named which 
-  - [BIGTOP-643] - Fix package statement in TestHDFSQuota
-  - [BIGTOP-646] - update metadata for Hue in the package testing manifest
-  - [BIGTOP-647] - service flume-node fail to start
-  - [BIGTOP-648] - hbase-thrift cannot be started properly
-  - [BIGTOP-649] - PackageTestCommon.checkRemoval() is broken
-  - [BIGTOP-650] - Override libexecdir for bigtop-utils on mageia
-
-- IMPROVEMENT
-  - [BIGTOP-266] - Need to override default yarn task classpath
-  - [BIGTOP-273] - Need to update puppet recipes for Hadoop 0.23 deployment
-  - [BIGTOP-313] - Move puppet configuration out of manifest and into extlookup() CSV
-  - [BIGTOP-322] - update .gitignore to ignore backup files and eclipse generated files.
-  - [BIGTOP-332] - it would be nice to bump hadoop version to 0.23.1-SNAPSHOT across the stack
-  - [BIGTOP-366] - Set some minimum versions for ZK + Flume
-  - [BIGTOP-371] - Update bigtop repository url for the appliance
-  - [BIGTOP-454] - update oozie man page
-  - [BIGTOP-465] - webhdfs isn't in the puppeted hadoop configuration
-  - [BIGTOP-466] - Secure zookeeper support missing from puppet
-  - [BIGTOP-470] - [puppet] Improve secure configuration for zk and hbase
-  - [BIGTOP-475] - Provides initscript for the REST HBase gateway
-  - [BIGTOP-522] - Minor Hive Packaging Cleanup
-  - [BIGTOP-534] - Need init.d script for zkfc
-  - [BIGTOP-535] - add httpfs config property pointing to hadoop conf dir
-  - [BIGTOP-555] - Bump version of HBase to 0.92.1
-  - [BIGTOP-567] - Make sure we can detect the JAVA_HOME for java-package built JDK package
-  - [BIGTOP-572] - Fix HDFS directory values in pseudo-conf hdfs-site.xml for 0.23+ NN/DN/SNN
-  - [BIGTOP-573] - we no longer have to override container-executor.conf.dir
-  - [BIGTOP-577] - [base conf] There's no hadoop-metrics2.properties present in the configs that are shipped.
-
-- NEW FEATURE
-  - [BIGTOP-248] - Update packages for Hadoop 0.23
-  - [BIGTOP-267] - Need to implement makefiles logic for tarball builds
-  - [BIGTOP-306] - Make a boxgrinder appliance
-  - [BIGTOP-405] - Create a bootable ISO
-  - [BIGTOP-495] - [puppet] Add support for HA namenodes
-  - [BIGTOP-570] - Build package for Apache Giraph
-  - [BIGTOP-578] - Integrate Hue 2.0.0 into Bigtop
-  - [BIGTOP-602] - Bigtop 0.4.0 release
-
-- TASK
-  - [BIGTOP-298] - Upgrade to ZooKeeper 3.4.1
-  - [BIGTOP-319] - update to Mahout 0.6
-  - [BIGTOP-494] - Create Flume 1.1.0 packages in Bigtop
-
-- TEST
-  - [BIGTOP-321] - Make TestLoadAndVerify parameterizable and runnable from command line
-  - [BIGTOP-444] - Add sqoop smoke tests
-  - [BIGTOP-569] - fix for TestCLI failures due to change in behavior of hadoop fs -put
-  - [BIGTOP-610] - Add basic hdfs fsck test and move logError helper to bigtop-test-framework
-  - [BIGTOP-611] - Add HBase TestImportTsv
-  - [BIGTOP-618] - Add HBase hbck test 
-  - [BIGTOP-619] - Add HBase CopyTable test
-  - [BIGTOP-620] - Add test for dfsadmin commands
-  - [BIGTOP-622] - Add test for HDFS Balancer
-  - [BIGTOP-623] - Add test for HDFS Quota Testing
-  - [BIGTOP-627] - Add HBase balancer test 
-  - [BIGTOP-631] - Add HBase Import/Export test
-  - [BIGTOP-641] - Add hdfs fsck options in TestFsck
-
-* Release 0.3.0 (2012-03-20)
-
-- SUB-TASK
-  - BIGTOP-53: Bump version of flume to 0.9.4
-  - BIGTOP-253: Update the debs for hadoop 0.23
-  - BIGTOP-317: rename hadoop-zookeeper package into zookeeper
-  - BIGTOP-327: rename hadoop-hbase package into hbase
-  - BIGTOP-328: rename hadoop-pig package into pig
-  - BIGTOP-329: rename hadoop-hive package into hive
-  - BIGTOP-353: Bump version of Hadoop to 1.0.1
-  - BIGTOP-354: Bump version of HBase to 0.92
-  - BIGTOP-360: Bump version of Hive to 0.8.1
-  - BIGTOP-361: Bump version of Pig to 0.9.2
-  - BIGTOP-362: Bump version of Whirr to 0.7.0
-  - BIGTOP-363: Bump version of sqoop to sqoop-1.4.1
-  - BIGTOP-387: Zookeeper version needs to be updated to 3.4.3
-  - BIGTOP-388: Let's add Mahout 0.6 to trunk
-  - BIGTOP-424: Bump version of Oozie to 3.1.3
-
-- BUG
-  - BIGTOP-89: mahout examples in package using mahout from /usr/lib/mahout/bin/mahout rather than /usr/bin/mahout
-  - BIGTOP-191: refactor hadoop::create_hdfs_dirs
-  - BIGTOP-209: on debian hadoop launcher script has exec debian/tmp/usr/lib/hadoop/bin/hadoop
-  - BIGTOP-241: hadoop .21+ requires /jobtracker to be available in HDFS
-  - BIGTOP-247: Typo in error msgs
-  - BIGTOP-257: HBase test source files are in wrong dir, cause compiler errors within Eclipse
-  - BIGTOP-258: Compilation Error in sqoop-integration project when using Eclipse
-  - BIGTOP-259: Most of the mahout tests can't be executed out-of-the box
-  - BIGTOP-262: Compilation Error in  project hadoop-smoke when using Eclipse
-  - BIGTOP-265: HBase smoke test execution needs to have extra class-path elements to pick up HBase configs
-  - BIGTOP-278: bigtop-packages/src/common/hadoop/do-component-build contains bash syntax but is executed by dash on Debian/Ubuntu
-  - BIGTOP-296: puppet code for sqoop module lacks require for sqoop package
-  - BIGTOP-312: Puppet resource dependency syntax used in hadoop/manifests/init.pp doesn't always work
-  - BIGTOP-315: Cut-n-paste remains of @HADOOP_DAEMON@ in non-hadoop init.d scripts
-  - BIGTOP-318: Appliance VM should not have an ephemeral Apache Bigtop (incubating) repository
-  - BIGTOP-320: hive RPM spec uses missing __rmdir macros
-  - BIGTOP-325: bump version of Zookeeper to 3.4.2
-  - BIGTOP-334: Boxgrinder does not support space in repo names
-  - BIGTOP-339: zookeeper init.d script has a typo in PID file location
-  - BIGTOP-342: Flume build and install scripts should be executable
-  - BIGTOP-343: BIGTOP-109 was not applied to debs
-  - BIGTOP-352: bump versions of the components required for the next quarterly release of Bigtop
-  - BIGTOP-356: hbase package should not ship jar files provided by zookeeper and hadoop
-  - BIGTOP-365: Hive metastore hangs on startup on Centos5
-  - BIGTOP-367: Upgrade sequencing issue
-  - BIGTOP-370: Hive metastore does not initialize with permissions failure
-  - BIGTOP-373: bigtop-detect-javahome not customizable
-  - BIGTOP-374: bigtop-detect-javahome does not pickup new multiarch locations in Debian/Ubuntu
-  - BIGTOP-376: Remove unused obsolete statement in hadoop packaging
-  - BIGTOP-379: Package testing for Bigtop 0.3.0 release
-  - BIGTOP-382: hadoop-conf-pseudo packages contains subversion metadata
-  - BIGTOP-383: bigtop-detect-javahome installed at location unexpected by scripts on SLES
-  - BIGTOP-384: HBase bundle 2 different versions of hadoop as well as a zookeeper version
-  - BIGTOP-386: HBase execution script should exclude ZOOKEEPER_CONF
-  - BIGTOP-390: add Fedora to the list of platforms recognized by iTest
-  - BIGTOP-391: update package test manifest
-  - BIGTOP-392: zookeeper package needs to be updated for 3.4 series
-  - BIGTOP-393: hadoop packages don't set nproc
-  - BIGTOP-394: hbase postinst script needs to be renamed
-  - BIGTOP-396: Missing resource dependencies in puppet for secure clusters
-  - BIGTOP-397: hbase, sqoop and oozie don't use alternatives for managing configuration
-  - BIGTOP-401: unable to run hadoop-mapreduce-historyserver
-  - BIGTOP-402: manifests for package testing contain way too many duplicate data
-  - BIGTOP-404: flume-ng does not start
-  - BIGTOP-420: bigtop-detect-javahome installed at location unexpected by scripts on SLES
-  - BIGTOP-427: Add support to itest-common for Oracle Linux
-  - BIGTOP-428: sqoop build needs to be forced to depend on HBase 0.92.0 artifacts
-  - BIGTOP-430: oozie examples are missing
-  - BIGTOP-431: Last bump to Hadoop 1.0.1 has broken Mageia's build of Apache Hadoop
-  - BIGTOP-432: BIGTOP-424 broke oozie build
-  - BIGTOP-437: hadoop 1.0.1 complains about HADOOP_HOME being deprecated
-  - BIGTOP-440: need to cleanup pom files in bigtop-tests/test-execution
-  - BIGTOP-442: add Apache license to the puppet code
-  - BIGTOP-443: deb/oozie/oozie-client.postinst installs an alternative for a path that isn't there
-  - BIGTOP-446: Typo in hadoop module for puppet
-  - BIGTOP-459: remove references to cloudera from the packaging files
-
-- IMPROVEMENT
-  - BIGTOP-17: HBase has no "alternatives" whereas Hadoop and ZK do
-  - BIGTOP-210: Organize yum repo by architecture
-  - BIGTOP-221: Remove hadoop prefix from non-hadoop packages
-  - BIGTOP-261: pseudo distributed config would benefit from dfs.safemode.extension set to 0 and dfs.safemode.min.datanodes set to 1
-  - BIGTOP-281: Ivy home for Debian builds should be set to $HOME/.ivy2
-  - BIGTOP-313: Move puppet configuration out of manifest and into extlookup() CSV
-  - BIGTOP-322: update .gitignore to ignore backup files and eclipse generated files.
-  - BIGTOP-371: Update bigtop repository url for the appliance
-
-- NEW FEATURE
-  - BIGTOP-255: add tests for Mahout
-  - BIGTOP-260: Mahout puppet module needs to be implemented
-  - BIGTOP-303: Rename Bigtop packages to reflect TLP status of packaged projects
-  - BIGTOP-306: Make a boxgrinder appliance
-  - BIGTOP-323: Start integrating Flume NG
-  - BIGTOP-403: RPM Packages should display the GNU/Linux distribution they are supposed to be installed on
-  - BIGTOP-405: Create a bootable ISO
-
-- TASK
-  - BIGTOP-157: add support for ZooKeeper version 3.4.0
-  - BIGTOP-298: Upgrade to ZooKeeper 3.4.1
-  - BIGTOP-439: we need to review package test manifest for the Bigtop 0.3.0 release
-
-- TEST
-  - BIGTOP-287: Integrating test for HBASE-4570
-  - BIGTOP-321: Make TestLoadAndVerify parameterizable and runnable from command line
-  - BIGTOP-411: Add TestRegionMover to HBase system tests
-  - BIGTOP-412: Add TestMoveRootMetaRegions to HBase system tests
-  - BIGTOP-414: enable hadoop tests in hadoop-0.23 branch to build and to run
-
-* Release 0.2.0 (2011-11-07)
-
-- INCOMPATIBLE CHANGES
-
-- IMPROVEMENTS
-  - BIGTOP-11: Adds Apache Mahout (0.5) to Bigtop. (rvs)
-  - BIGTOP-25: Add a new bigtop-utils package which provides some java home autodetection (bmahe)
-  - BIGTOP-34: Add Hive server package (rvs)
-  - BIGTOP-54: Bump version of oozie to 2.3.2 (rvs)
-  - BIGTOP-55: Bump version of sqoop to sqoop-1.3.0 (rvs)
-  - BIGTOP-56: Bump version of hive to hive-0.7.1 (rvs)
-  - BIGTOP-90: Bump version of Hadoop to 0.20.205.0 (rvs)
-  - BIGTOP-95: Puppet-based deployment infrastructure needs to be added to bigtop (rvs)
-  - BIGTOP-120: Bump version of pig to 0.9.1 (rvs)
-  - BIGTOP-121: Bump version of HBase to 0.90.4 (rvs)
-
-- BUG FIXES
-  - BIGTOP-27: Add disclaimer to website. (edwardyoon)
-  - BIGTOP-4:  Tweaks POMs to use Apache POM as parent, get rid of obsolete infrastructure info. (abayer)
-  - BIGTOP-15: Adds build dependency on libssl-dev on Debian. (plinnell)
-  - BIGTOP-20: hard-coded pid location in Hadoop init.d scripts (rvs)
-  - BIGTOP-26: Fixing svn properties. (abayer)
-  - BIGTOP-29: Remove java dependency from all os types (rvs)
-  - BIGTOP-30: Fixes ownership of /usr/lib/flume/bin/flume-daemon.sh. (abayer)
-  - BIGTOP-31: Remove /usr/lib/whirr/cli/whirr.log from packaging. (abayer)
-  - BIGTOP-32: Hadoop fuse package should include a file with defaults (rvs)
-  - BIGTOP-38: Add link to the mailing list archives to the Bigtop website (rvs)
-  - BIGTOP-40: iTest package names/groupIds need to be moved to org.apache rather than com.cloudera (rvs)
-  - BIGTOP-43: HBase and Whirr should now rebuild for Debian packages. (abayer)
-  - BIGTOP-45: Fix build issue for Mageia and openSUSE (bmahe)
-  - BIGTOP-46: Switches to libzip1 and libzip-dev for Hadoop Debian dependencies. (abayer)
-  - BIGTOP-47: Switch Whirr to download from archive.apache.org. (abayer)
-  - BIGTOP-48: Fix pig build on Mageia (bmahe)
-  - BIGTOP-50: Flume init script's dependency on syslog needs to be removed (rvs)
-  - BIGTOP-51: Reorganizes source repository. (abayer)
-  - BIGTOP-52: Fold integration/sqoop tests into smokes (rvs)
-  - BIGTOP-57: Apt repo creation fails due to invalid distributions file (rvs)
-  - BIGTOP-61: HBase fails to add HADOOP_CONF_DIR to its classpath (rvs)
-  - BIGTOP-62: /usr/lib/hbase and everything within is owned by hbase:hbase (rvs)
-  - BIGTOP-63: Hbase leaves some unnecessary directories and symlinks when being uninstalled (rvs)
-  - BIGTOP-68: Moves test-artifacts/(module)/src/test to .../src/main. (abayer)
-  - BIGTOP-69: Certain tests in iTest common require extraneous dependencies and should be @Ignored for now (rvs)
-  - BIGTOP-70: Need to stub Hadoop 0.21+ dependencies for now in test artifacts (rvs)
-  - BIGTOP-71: If explicit Ant dependency was not specified, unit tests would fail. (abayer)
-  - BIGTOP-72: Gmaven plugin needs to be explicitly bound to compilation goals (rvs)
-  - BIGTOP-73: oozie-client can't be safely removed on Debian (rvs)
-  - BIGTOP-74: Add Mahout to package readiness tests (rvs)
-  - BIGTOP-75: Mahout package is broken (rvs)
-  - BIGTOP-76: Package tests need to be refactored in order to provide accurate reporting (rvs)
-  - BIGTOP-77: All of our config files should be marked %config and (noreplace) (rvs)
-  - BIGTOP-80: Add package dependencies to Zookeeper RPM for programs invoked from install/uninstall scripts (rvs)
-  - BIGTOP-81: oozie-setup.sh script fails if Oozie has not been started once (rvs)
-  - BIGTOP-82: Package testing manifest needs to be updated now that we've enabled more tests (rvs)
-  - BIGTOP-83: Advanced checks in checkRemoval need to be moved out (rvs)
-  - BIGTOP-84: Package removal fails in case alternatives are deleted or damaged (rvs)
-  - BIGTOP-85: hadoop package ships native files (rvs)
-  - BIGTOP-86: Misc. issues with Oozie package (rvs)
-  - BIGTOP-87: The following packages don't remove alternatives: flume, hadoop-pig, hadoop-hive, hadoop-hbase (rvs)
-  - BIGTOP-88: Use Hadoop package's hadoop jar in classpath. (abayer)
-  - BIGTOP-91: Docs should be in %{_docdir}/<PACKAGE_NAME>-<PACKAGE_VERSION> (rvs)
-  - BIGTOP-92: Flume's dependency on ZK needs to be the same for DEB as it is for RPM (rvs)
-  - BIGTOP-93: ZK dependencies can not be satisfied on SUSE (rvs)
-  - BIGTOP-94: chkconfig startup priorities need to be normalized (rvs)
-  - BIGTOP-97: default files need to be normalized and moved to common (rvs)
-  - BIGTOP-99: Fix sqoop build for Mageia (bmahe)
-  - BIGTOP-100: Whirr packaging build fails if we're not running the tests. (abayer)
-  - BIGTOP-102: daemons need to be taken care of when packages are installed/upgraded/uninstalled (rvs)
-  - BIGTOP-104: hadoop daemons can not be started because hadoop-config.sh resets HADOOP_HOME (rvs)
-  - BIGTOP-105: hadoop services can not be started with the default configs in place (rvs)
-  - BIGTOP-108: oozie docs are installed at the incorrect location (rvs)
-  - BIGTOP-109: Hadoop should not format the namenode on installation (bmahe)
-  - BIGTOP-110: DN can't be started in secure configuration on SLES (rvs)
-  - BIGTOP-111: Add urpmi support in test framework for Mageia (bmahe)
-  - BIGTOP-112: Fixes for Mageia * Fix zlib dependency for Mageia * Replace textutils dependency by coreutils (bmahe)
-  - BIGTOP-113: Fix dependency for mageia on lsb init scripts helpers for flume, hadoop, sqoop, zookeeper and hbase (bmahe)
-  - BIGTOP-119: Create version-less symlinks of hadoop jars (bmahe)
-  - BIGTOP-122: enable hadoop-sbin package (rvs)
-  - BIGTOP-123: enable hadoop-fuse package (rvs)
-  - BIGTOP-124: documentation on SLES is not supposed to have package version embedded in the directory name (rvs)
-  - BIGTOP-125: whirr package is full of build artifacts (rvs)
-  - BIGTOP-126: zookeeper rpm should require groupadd/useradd (rvs)
-  - BIGTOP-127: BIGTOP-120 broke build on at least Fedora (and Mageia) (rvs)
-  - BIGTOP-128: Need to call create-c++-configure target so autoreconf is called for native projects (bmahe)
-  - BIGTOP-129: oozie init.d scripts should transition to using oozied.sh from upstream (rvs)
-  - BIGTOP-130: Mahout package has an unneeded dependency on chkconfig (rvs)
-  - BIGTOP-131: RPM dependencies for flume need to be optimized (rvs)
-  - BIGTOP-132: hadoop datanode on SLES returns an incorrect code on stop/start (rvs)
-  - BIGTOP-133: hadoop packages should set up nofile limits automatically (rvs)
-  - BIGTOP-134: hbase packages should set up nofile limits automatically (rvs)
-  - BIGTOP-136: hadoop package still leaves binary files in /usr/lib/hadoop/lib (rvs)
-  - BIGTOP-137: task-controller needs to be rebuilt (rvs)
-  - BIGTOP-138: task-controller binary needs to be root:mapred with 4750 permissions (rvs)
-  - BIGTOP-139: links to jsvc and task-controller need to be provided in order to make hard-coded values work (rvs)
-  - BIGTOP-140: need to workaround HDFS-1943 (rvs)
-  - BIGTOP-141: hadoop now requires an explicit declaration of a HADOOP_SECURE_DN_USER in order to run a secure datanode (rvs)
-  - BIGTOP-142: need to update zookeeper manifest to the state of packages (rvs)
-  - BIGTOP-143: need to update hadoop manifest to the state of packages (rvs)
-  - BIGTOP-144: kerberos deployment needs to be made compatible with default JRE crypto setup (rvs)
-  - BIGTOP-145: make cluster.pp be able to handle secure and unsecure deployments (rvs)
-  - BIGTOP-148: Hadoop fuse man page does not have the right permission and RPMs should not specify its extension (bmahe)
-  - BIGTOP-149: HBase now requires commons-configuration jar to be in its classpath (rvs)
-  - BIGTOP-150: hadoop puppet deployment needs to install hadoop-sbin on datanodes when security is turned on (rvs)
-  - BIGTOP-151: need to provide more test coverage for HBase testing (rvs)
-  - BIGTOP-152: TestHadoopExamples needs refactoring into a parameterized test (rvs)
-  - BIGTOP-155: need to implement a uniform way of versioning dependencies for test artifacts (rvs)
-  - BIGTOP-156: Only run autoreconf when autotools >= 2.61 is installed (patch by Bruno Mahe) (rvs)
-  - BIGTOP-158: oozie build pulls into oozie.war incorrect version of hadoop-core jar file (rvs)
-  - BIGTOP-159: oozie init.d script is not executable (plinnell)
-  - BIGTOP-160: hadoop has deprecated HADOOP_HOME (rvs)
-  - BIGTOP-161: the new layout that Pig script adopted needs to be worked around (rvs)
-  - BIGTOP-163: stock hadoop doesn't support wildcards in proxy users (rvs)
-  - BIGTOP-164: lzo codecs need to be disabled everywhere in Bigtop (rvs)
-  - BIGTOP-165: oozie manifest includes functionality (sqoop and hive actions) that is not in oozie (rvs)
-  - BIGTOP-166: Add the missing examples. (plinnell)
-  - BIGTOP-167: All of Hadoop wrapper scripts and init.d scripts need to start include find-java code from bigtop-utils (bmahe)
-  - BIGTOP-169: Hadoop should buildrequire openssl-devel for centos builds (plinnell)
-  - BIGTOP-170: Add some known openJDK locations (bmahe)
-  - BIGTOP-171: All of Pig wrapper scripts and init.d scripts need to start include find-java code from bigtop-utils (bmahe)
-  - BIGTOP-173: we still have references to hadoop-0.20 in our puppet code (rvs)
-  - BIGTOP-174: Does pig really requires git to build?  (plinnell)
-  - BIGTOP-175: All of HBase wrapper scripts and init.d scripts need to start include find-java code from bigtop-utils (bmahe)
-  - BIGTOP-176: Cleanup maintainers as well as mentions to Cloudera (bmahe)
-  - BIGTOP-178: All of Flume wrapper scripts and init.d scripts need to start include find-java code from bigtop-utils (bmahe)
-  - BIGTOP-179: All of Hive wrapper scripts and init.d scripts need to start include find-java code from bigtop-utils (bmahe)
-  - BIGTOP-180: All of Mahout wrapper scripts and init.d scripts need to start include find-java code from bigtop-utils (bmahe)
-  - BIGTOP-181: puppeted hadoop doesn't place PID files into /var/run/hadoop (rvs)
-  - BIGTOP-182: All of Oozie wrapper scripts and init.d scripts need to start include find-java code from bigtop-utils (bmahe)
-  - BIGTOP-184: All of Sqoop wrapper scripts and init.d scripts need to start include find-java code from bigtop-utils (bmahe)
-  - BIGTOP-188: All of Whirr wrapper scripts and init.d scripts need to start include find-java code from bigtop-utils (bmahe)
-  - BIGTOP-189: hadoop daemons do not run under the dedicated users (hdfs, mapred) (rvs)
-  - BIGTOP-190: All of Zookeeper wrapper scripts and init.d scripts need to start include find-java code from bigtop-utils (bmahe)
-  - BIGTOP-191: refactor hadoop::create_hdfs_dirs (rvs)
-  - BIGTOP-192: oozie package needs to depend on unzip (rvs)
-  - BIGTOP-192: oozie package needs to depend on zip (rvs)
-  - BIGTOP-193: zookeeper doesn't generate any logs (rvs)
-  - BIGTOP-194: source is not a sh(1) command (bmahe)
-  - BIGTOP-195: Document build requirements for openSUSE 11.4 (plinnell)
-  - BIGTOP-197: Package testing artifact needs to be refactored (rvs)
-  - BIGTOP-198: remove hadoop-daemon.sh is included in too many packages (plinnell)
-  - BIGTOP-199: whirr docs are installed in the incorrect location (rvs)
-  - BIGTOP-201: bigtop package has an incorrect dependency on libhdfs.so (rvs)
-  - BIGTOP-202: zookeeper package is missing zoo.cfg (rvs)
-  - BIGTOP-203: sqoop needs to have access to hadoop .jars in its lib (rvs)
-  - BIGTOP-205: zookeeper-server doesn't do nohup before launching a daemon (rvs)
-  - BIGTOP-206: during deployment time hdfs/mapred dir creation should be managed by hadoop module (rvs)
-  - BIGTOP-207: zookeeper doesn't allow for overriding the location of the PID file (rvs)
-  - BIGTOP-208: Zookeeper does not depend on git or subversion (bmahe)
-  - BIGTOP-209: on debian hadoop launcher script has exec debian/tmp/usr/lib/hadoop/bin/hadoop (rvs)
-  - BIGTOP-212: need to disable tests for features missing from Bigtop 0.2.0 (rvs)
-  - BIGTOP-214: sequencing in puppet module for Hadoop needs to be improved (rvs)
-  - BIGTOP-215: make site.pp a bit more friendly to how we start up cluster in Bigtop jenkins (rvs)
-  - BIGTOP-216: make oozie tests more robust in finding oozie-examples.tar.gz (rvs)
-  - BIGTOP-217: workaround Hive SQL parser in headers for our tests (rvs)
-
-* Release 0.1.0 (2011-08-19)
-
-- INCOMPATIBLE CHANGES
-
-- IMPROVEMENTS
-  - BIGTOP-21: Add support for Mageia. (bmahe)
-  - BIGTOP-14: Add CHANGES.txt file. (abayer)
-  - BIGTOP-9: Add Bigtop podling website. (edwardyoon)
-  - BIGTOP-6: Adding tarball generation to top-level makefile. (abayer)
-
-- BUG FIXES
-  - BIGTOP-13: Fix Hive compilation issue. (bmahe)
-  - BIGTOP-10: Fixing NOTICE file. (abayer)
-  - BIGTOP-8: Debian copyright files needed to be reformatted. (bmahe from James Page)
-  - BIGTOP-3: Add top-level, real .gitignore file for git-svn. (abayer)
-  - BIGTOP-2: Adding/updating license headers for ASF. (abayer)
-  - BIGTOP-1: Initial code import. (abayer)
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index d645695..0000000
--- a/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/MAINTAINERS.txt b/MAINTAINERS.txt
deleted file mode 100644
index 93cf97a..0000000
--- a/MAINTAINERS.txt
+++ /dev/null
@@ -1,30 +0,0 @@
-alluxio: jay vyas, huamin chen
-apex: chinmay <ch...@apache.org>, aniruddha <an...@apache.org>
-bigpetstore: jay vyas, rj nowling
-CI infra: rvs
-data generators: rj nowling, jay vyas
-flink: rmetzger, mbalassi
-flume: bmahe
-gradle / build system: cos, rvs
-ignite-hadoop: cos, Sergey Vladykin <se...@gmail.com>
-hadoop: mark grover, cos, rvs
-hama: minho kim <mi...@apache.org>, edward j. yoon
-hbase: andrew purtell, rvs
-hive: mark grover, youngwoo kim
-itest: cos, rvs
-mvn publishing/packaging: rvs
-oozie: evans ye, rvs
-phoenix: andrew purtell, youngwoo kim
-pig: evans ye, daniel dai
-puppet recipes: jay vyas, cos, evans ye, rvs
-qfs: kstinson <ks...@quantcast.com>
-smoke-tests: jay vyas, david capwell
-spark: jay vyas, youngwoo kim
-sqoop: sean mackrory, youngwoo kim
-tajo: yeongeon kim
-test-artifacts and test-execution repos:
-tez: evans ye, oflebbe
-vagrant recipes: jay vyas, evans ye
-ycsb: youngwoo kim
-zookeeper: sean mackrory, rvs
-elasticsearch: hernan vivani
diff --git a/NOTICE b/NOTICE
deleted file mode 100644
index dc7c948..0000000
--- a/NOTICE
+++ /dev/null
@@ -1,14 +0,0 @@
-Apache Bigtop
-Copyright 2014, The Apache Software Foundation
-Portions Copyright 2015-2016 Canonical Ltd.
-
-This product includes software developed at
-The Apache Software Foundation (http://www.apache.org/).
-
-In addition, this product includes files licensed under:
-
-* The FreeBSD Documentation License
-  https://www.freebsd.org/copyright/freebsd-doc-license.html
-
-* The MIT License
-  https://opensource.org/licenses/MIT
diff --git a/README.md b/README.md
deleted file mode 100644
index 97d53d7..0000000
--- a/README.md
+++ /dev/null
@@ -1,271 +0,0 @@
-[![Travis CI](https://img.shields.io/travis/apache/bigtop.svg?branch=master)](https://travis-ci.org/apache/bigtop)
-
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements. See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License. You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-
-[Apache Bigtop](http://bigtop.apache.org/)
-==========================================
-
-...is a project for the development of packaging and tests of the [Apache Hadoop](http://hadoop.apache.org/) ecosystem.
-
-The primary goal of Apache Bigtop is to build a community around the packaging and interoperability testing of Apache Hadoop-related projects. This includes testing at various levels (packaging, platform, runtime, upgrade, etc...) developed by a community with a focus on the system as a whole, rather than individual projects.
-
-Immediately Get Started with Deployment and Smoke Testing of BigTop
-===================================================================
-
-The simplest way to get a feel for how bigtop works, is to just cd into `provisioner` and try out the recipes under vagrant or docker.  Each one rapidly spins up, and runs the bigtop smoke tests on, a local bigtop based big data distribution. Once you get the gist, you can hack around with the recipes to learn how the puppet/rpm/smoke-tests all work together, going deeper into the components you are interested in as described below.
-
-Quick overview of source code directories
-=========================================
-
-* __bigtop-deploy__ : deployment scripts and puppet stuff for Apache Bigtop.
-* __bigtop-packages__ : RPM/DEB specifications for Apache Bigtop subcomponents.
-* __bigtop-test-framework__ : The source code for the iTest utilities (framework used by smoke tests).
-* __bigtop-tests__ :
-* __test-artifacts__ : source for tests.
-* __test-execution__ : maven pom drivers for running the integration tests found in test-artifacts.
-* __bigtop-toolchain__ : puppet scripts for setting up an instance which can build Apache Bigtop, sets up utils like jdk/maven/protobufs/...
-* __provisioner__ : Vagrant and Docker Provisioner that automatically spin up Hadoop environment with one click.
-* __docker__ : Dockerfiles and Docker Sandbox build scripts.
-
-Also, there is a new project underway, Apache Bigtop blueprints, which aims to create templates/examples that demonstrate/compare various Apache Hadoop ecosystem components with one another.
-
-Contributing
-============
-
-There are lots of ways to contribute.  People with different expertise can help with various subprojects:
-
-* __puppet__ : Much of the Apache Bigtop deploy and packaging tools use puppet to bootstrap and set up a cluster. But recipes for other tools are also welcome (ie. Chef, Ansible, etc.)
-* __groovy__ : Primary language used to write the Apache Bigtop smokes and itest framework.
-* __maven__ : Used to build Apache Bigtop smokes and also to define the high level Apache Bigtop project.
-* __RPM/DEB__ : Used to package Apache Hadoop ecosystem related projects into GNU/Linux installable packages for most popular GNU/Linux distributions. So one could add a new project or improve existing packages.
-* __hadoop__ : Apache Hadoop users can also contribute by using the Apache Bigtop smokes, improving them, and evaluating their breadth.
-* __contributing your workloads__ : Contributing your workloads enable us to tests projects against real use cases and enable you to have people verifying the use cases you care about are always working.
-* __documentation__ : We are always in need of a better documentation!
-* __giving feedback__ : Tell us how you use Apache Bigtop, what was great and what was not so great. Also, what are you expecting from it and what would you like to see in the future?
-
-Also, opening [JIRA's](https://issues.apache.org/jira/browse/BIGTOP) and getting started by posting on the mailing list is helpful.
-
-What do people use Apache Bigtop for?
-==============================
-
-You can go to the [Apache Bigtop website](http://bigtop.apache.org/) for notes on how to do "common" tasks like:
-
-  * Apache Hadoop App developers: Download an Apache Bigtop built Apache Hadoop 2.0 VM from the website, so you can have a running pseudo-distributed Apache Hadoop cluster to test your code on.
-  * Cluster administers or deployment gurus: Run the Apache Bigtop smoke tests to ensure that your cluster is working.
-  * Vendors: Build your own Apache Hadoop distribution, customized from Apache Bigtop bits.
-
-Getting Started
-===============
-
-Below are some recipes for getting started with using Apache Bigtop. As Apache Bigtop has different subprojects, these recipes will continue to evolve.
-For specific questions it's always a good idea to ping the mailing list at dev-subscribe@bigtop.apache.org to get some immediate feedback, or [open a JIRA](https://issues.apache.org/jira/browse/BIGTOP).
-
-For Users: Running the smoke tests
------------------------------------
-
-The simplest way to test bigtop is described in bigtop-tests/smoke-tests/README file
-
-For integration (API level) testing with maven, read on.
-
-For Users: Running the integration tests
------------------------------------------
-
-WARNING: since testing packages requires installing them on a live system it is highly recommended to use VMs for that. Testing Apache Bigtop is done using iTest framework. The tests are organized in maven submodules, with one submodule per Apache Bigtop component.  The bigtop-tests/test-execution/smokes/pom.xml defines all submodules to be tested, and each submodule is in its own directory under smokes/, for example:
-
-*smokes/hadoop/pom.xml*
-*smokes/hive/pom.xml*
-*... and so on.*
-
-* New way (with Gradle build in place)
-  * Step 1: install smoke tests for one or more components
-    * Example 1:
-
-        gradle installTestArtifacts
-
-    * Example 2: Installing just Hadoop-specific smoke tests
-
-        gradle install-hadoop
-
-  * Step 2: Run the smoke tests on your cluster (see Step 3 and/or Step 4 below)
-
-  We are on the route of migrating subprojects under top-level gradle build. Currently
-  converted projects could be listed by running
-
-        gradle projects
-
-  To see the list of tasks in a subproject, ie itest-common, you can run
-
-        gradle itest-common:tasks
-
-* Old Way
-  * Step 1: Build the smokes with snapshots.  This ensures that all transitive dependencies etc.. are in your repo
-
-        mvn clean install -DskipTests -DskipITs -DperformRelease -f ./bigtop-test-framework/pom.xml
-        mvn clean install -DskipTests -DskipITs -DperformRelease -f ./test-artifacts/pom.xml
-
-  * Step 2: Now, rebuild in "offline" mode.  This will make sure that your local changes to bigtop are embedded in the changes.
-
-        mvn clean install -DskipTests -DskipITs -DperformRelease -o -nsu -f ./bigtop-test-framework/pom.xml
-        mvn clean install -DskipTests -DskipITs -DperformRelease -o -nsu -f ./bigtop-tests/test-artifacts/pom.xml
-
-  * Step 3: Now, you can run the smoke tests on your cluster.
-    * Example 1: Running all the smoke tests with TRACE level logging (shows std out from each mr job).
-
-            mvn clean verify -Dorg.apache.bigtop.itest.log4j.level=TRACE -f ./bigtop/bigtop-tests/test-execution/smokes/pom.xml
-
-    * Just running hadoop examples, nothing else.
-
-            mvn clean verify -D'org.apache.maven-failsafe-plugin.testInclude=**/*TestHadoopExamples*' -f bigtop-tests/test-execution/smokes/hadoop/pom.xml
-
-    Note: A minor bug/issue: you need the "testInclude" regular expression above, even if you don't want to customize the tests,
-    since existing test names don't follow the maven integration test naming convention of IT*, but instead, follow the surefire (unit test) convention of Test*.
-
-For Users: Creating Your Own Apache Hadoop Environment
------------------------------------------------
-
-Another common use case for Apache Bigtop is creating / setting up your own Apache Hadoop distribution.  
-For details on this, check out the bigtop-deploy/README.md file, which describes how to use the puppet repos
-to create and setup your VMs.
-You can also try out provisioner to quickly get the idea how it works.
-
-For Developers: Building the entire distribution from scratch
--------------------------------------------------------------
-
-Packages have been built for CentOS, Fedora, OpenSUSE, Ubuntu, and Debian. They can probably be built for other platforms as well. Some of the binary artifacts might be compatible with other closely related distributions.
-
-__On all systems, Building Apache Bigtop requires certain set of tools__
-
-  To bootstrap the development environment from scratch execute
-
-    ./gradlew toolchain
-
-  This build task expects Puppet to be installed; user has to have sudo permissions. The task will pull down and install
-  all development dependencies, frameworks and SDKs, required to build the stack on your platform.
-
-  Before executing the above command, user can use the following script to install Puppet:
-
-    sudo bigtop_toolchain/bin/puppetize.sh
-
-  Note for CentOS (and RHEL, which is not supported officially but on a best effort basis) 8 users: on these distros,
-  puppetize.sh installs the puppet command into /opt/puppetlabs/bin, which is not included usually in secure_path defined in /etc/sudoers.
-  So users may have to add that path to secure_path manually.
-  Also, RHEL 8 users may have to enable their subscriptions themselves for using EPEL.
-  cf. https://fedoraproject.org/wiki/EPEL#How_can_I_use_these_extra_packages.3F
-
-  To immediately set environment after running toolchain, run
-
-    . /etc/profile.d/bigtop.sh
-
-* __Building packages__ : `gradle [component-name]-pkg`
-
-  If -Dbuildwithdeps=true is set, the Gradle will follow the order of the build specified in
-  the "dependencies" section of bigtop.bom file. Otherwise just a single component will get built (original behavior).
-
-  To use an alternative definition of a stack composition (aka BOM), specify its
-  name with -Dbomfile=<filename> system property in the build time.
-
-  You can visualize all tasks dependencies by running `gradle tasks --all`
-* __Building local YUM/APT repositories__ : `gradle [yum|apt]`
-
-* __Recommended build environments__
-
-  Bigtop provides "development in the can" environments, using Docker containers.
-  These have the build tools set by the toolchain, as well as the user and build
-  environment configured and cached. All currently supported OSes could be pulled
-  from official Bigtop repository at https://hub.docker.com/r/bigtop/slaves/tags/
-
-  To build a component (bigtop-groovy) for a particular OS (ubuntu-16.04) you can
-  run the following from a clone of Bigtop workspace (assuming your system has
-  Docker engine setup and working)
-  ```docker run --rm -u jenkins:jenkins -v `pwd`:/ws --workdir /ws bigtop/slaves:trunk-ubuntu-16.04
-  bash -l -c './gradlew allclean ; ./gradlew bigtop-groovy-pkg'```
-
-For Developers: Building and modifying the web site
----------------------------------------------------
-
-The website can be built by running `mvn site:site` from the root directory of the
-project.  The main page can be accessed from "project_root/target/site/index.html".
-
-The source for the website is located in "project_root/src/site/".
-
-
-For Developers: Building a component from Git repository
---------------------------------------------------------
-
-To fetch source from a Git repository, there're two ways to achieve this:
-a). modify `bigtop.bom` and add JSON snippets to your component/package, or
-b). specify properties at command line
-
-* __bigtop.bom__
-
-Add following JSON snippets to the desired component/package:
-
-```
-git     { repo = ""; ref = ""; dir = ""; commit_hash = "" }
-```
-
-  * `repo` - SSH, HTTP or local path to Git repo.
-  * `ref` - branch, tag or commit hash to check out.
-  * `dir` - [OPTIONAL] directory name to write source into.
-  * `commit_hash` - [OPTIONAL] a commit hash to reset to.
-
-Some packages have different names for source directory and source tarball
-(`hbase-0.98.5-src.tar.gz` contains `hbase-0.98.5` directory).
-By default source will be fetched in a directory named by `tarball { source = TARBALL_SRC }`
-without `.t*` extension.
-To explicitly set directory name use the `dir` option.
-
-When `commit_hash` specified, the repo to build the package will be reset to the commit hash.
-
-Example for HBase:
-
-```
-      name    = 'hbase'
-      version { base = '1.3.2'; pkg = base; release = 1 }
-      git     { repo = "https://github.com/apache/hbase.git"
-                ref  = "branch-1.3"
-                dir  = "${name}-${version.base}"
-                commit_hash = "1bedb5bfbb5a99067e7bc54718c3124f632b6e17"
-              }
-```
-
-* __command line__
-
-
-```
-./gradlew COMPONENT-pkg -Pgit_repo="" -Pgit_ref="" -Pgit_dir="" -Pgit_commit_hash="" -Pbase_version=""
-```
-
-Where `git_repo`, `git_ref`, `git_dir`, and `git_commit_hash` are exactly the same as what we set in JSON.
-And `base_version` is to overwrite:
-```
-      version { base = ''}
-```
-
-Example for Kafka:
-
-```
-./gradlew kafka-pkg-ind -Pgit_repo=https://github.com/apache/kafka.git -Pgit_ref=trunk -Pgit_commit_hash=dc0601a1c604bea3f426ed25b6c20176ff444079 -Pbase_version=2.2.0
-```
-
-You can mix both ways to build from Git, but command line always overwrites `bigtop.bom`.
-
-
-Contact us
-----------
-
-You can get in touch with us on [the Apache Bigtop mailing lists](http://bigtop.apache.org/mail-lists.html).
diff --git a/bigtop-bigpetstore/README.md b/bigtop-bigpetstore/README.md
deleted file mode 100644
index e032f84..0000000
--- a/bigtop-bigpetstore/README.md
+++ /dev/null
@@ -1,49 +0,0 @@
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-BigPetStore
-============
-
-BigPetStore is a family of example applications for the Hadoop/Spark
-ecosystems. BigPetStore generates and analyzes synthetic transaction data for
-a fictional chain of petstores.
-
-BigPetStore has the following aims:
-
-* Serve as a demo application to showcase capabilities of the BigTop distribution
-* Perform integration testing for BigTop's components
-* Serve as a template for building / packaging Hadoop/Spark applications
-* Provide scalable generation of complex synthetic data
-* Examples for using and integrating components such as Pig, Hive, Spark SQL, etc.
-* Examples of how to perform popular analytics tasks
-
-BigPetStore has the following components to date:
-
-* Gradle build systems supporting Java, Scala, and Groovy
-* Data generators
-* Analytics
-  * ETL
-  * Item Recommenders
-
-The BigPetStore application was originally developed for MapReduce and associated
-components such as Pig, Hive, Mahout, Crunch, etc. With the increasing popularity
-and importance of Spark, BigPetStore has been expanded to support Spark.  To support
-the use case of deploying to pure MapReduce or Spark environments, we've elected to
-separate the MapReduce and Spark support into separate applications. You can find the
-two applications, along with further documentation, under `bigpetstore-mapreduce` and
-`bigpetstore-spark`, respectively.
-
-
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/BPS_analytics.pig b/bigtop-bigpetstore/bigpetstore-mapreduce/BPS_analytics.pig
deleted file mode 100755
index d275a73..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/BPS_analytics.pig
+++ /dev/null
@@ -1,79 +0,0 @@
-----------------------------------------------------------------------------
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements.  See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License"); you may not use this file except in compliance with
--- the License.  You may obtain a copy of the License at
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
------------------------------------------------------------------------------
-
--- This is the analytics script that BigPetStore uses as an example for
--- demos of how to do ad-hoc analytics on the cleaned transaction data.
--- It is used in conjunction with the big pet store web app, soon to be
--- added to apache bigtop (As of 4/12/2014, the
--- corresponding web app to consume this scripts output is
--- in jayunit100.github.io/bigpetstore).
-
--- invoke with two arguments, the input file, and the output file. -param input=bps/cleaned -param output=bps/analytics
-
--- FYI...
--- If you run into errors, you can see them in
--- ./target/failsafe-reports/TEST-org.bigtop.bigpetstore.integration.BigPetStorePigIT.xml
-
--- First , we load data in from a file, as tuples.
--- in pig, relations like tables in a relational database
--- so each relation is just a bunch of tuples.
--- in this case csvdata will be a relation,
--- where each tuple is a single petstore transaction.
-csvdata =
-    LOAD '$input' using PigStorage()
-        AS (
-          dump:chararray,
-          state:chararray,
-          transaction:int,
-          custId:long,
-          fname:chararray,
-          lname:chararray,
-          productId:int,
-          product:chararray,
-          price:float,
-          date:chararray);
-
--- RESULT:
--- (BigPetStore,storeCode_AK,1,11,jay,guy,3,dog-food,10.5,Thu Dec 18 12:17:10 EST 1969)
--- ...
-
--- Okay! Now lets group our data so we can do some stats.
--- lets create a new relation,
--- where each tuple will contain all transactions for a product in a state.
-
-state_product = group csvdata by ( state, product ) ;
-
--- RESULT
--- ((storeCode_AK,dog-food) , {(BigPetStore,storeCode_AK,1,11,jay,guy,3,dog-food,10.5,Thu Dec 18 12:17:10 EST 1969)}) --
--- ...
-
-
--- Okay now lets make some summary stats so that the boss man can
--- decide which products are hottest in which states.
-
--- Note that for the "groups", we tease out each individual field here for formatting with
--- the BigPetStore visualization app.
-summary1 = FOREACH state_product generate STRSPLIT(group.state,'_').$1 as sp, group.product, COUNT($1);
-
-
--- Okay, the stats look like this.  Lets clean them up.
--- (storeCode_AK,cat-food)      2530
--- (storeCode_AK,dog-food)      2540
--- (storeCode_AK,fuzzy-collar)     2495
-
-dump summary1;
-
-store summary1 into '$output';
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/README.md b/bigtop-bigpetstore/bigpetstore-mapreduce/README.md
deleted file mode 100644
index f725be3..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/README.md
+++ /dev/null
@@ -1,201 +0,0 @@
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements. See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License. You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-(See accompanying source code for licensing information)
-
-BigPetStore
-============
-
-Apache Bigtop/Hadoop Ecosystem Demo
------------------------------------
-This software is created to demonstrate Apache Bigtop for processing
-big data sets.
-
-Architecture
-------------
-The application consists of the following modules
-
-* generator: generates raw data on the dfs
-* recommendations: Apache Mahout demo code for generating recommendations by anaylyzing the transaction records. This feature can be tracked at this [`JIRA` issue](https://issues.apache.org/jira/browse/BIGTOP-1272)
-* Pig: demo code for processing the data using Apache Pig
-* Hive: demo code for processing the data using Apache Hive. This part is not complete yet. We are working on it. You can track it using this [`JIRA` issue](https://issues.apache.org/jira/browse/BIGTOP-1270)
-* Crunch: demo code for processing the data using Apache Crunch
-
-Build Instructions
-------------------
-
-You'll need to have version 2.4 of  [`gradle`](http://www.gradle.org/downloads) installed and set-up correctly in order to follow along these instructions.
-We could have used the [`gradle-wrapper`](http://www.gradle.org/docs/current/userguide/gradle_wrapper.html) to avoid having to install `gradle`, but the `bigtop` project includes all `gradle*` directories in `.gitignore`. So, that's not going to work.
-
-### Build the JAR
-
-`gradle clean build` will build the bigpetstore `jar`. The `jar` will be located in the `build\libs` directory.
-
-### Run Intergration Tests With
-  * Pig profile: `gradle clean integrationTest -P ITProfile=pig`
-  * Mahout Profile: `gradle clean integrationTest -P ITProfile=mahout`
-  * Crunch profile: Not Implemented Yet
-  * Hive profile: Not implemented yet.
-
-If you don't specify any profile-name, or if you specify an invalid-name for the `integrationTest` task, no integration tests will be run.
-
-*Note:* At this stage, only the `Pig` and `Mahout` profiles are working. Will continue to update this area as further work is completed.
-
-For Eclipse Users
------------------
-
-1. Run `gradle eclipse` to create an eclipse project.
-2. Import the project into eclipse.
-
-*Note* whenever you modify the dependencies, you will need to run the `gradle eclipse` again. Refresh the project after doing so. You'd also need to have the `scala` plugin installed. Also, having a `gradle` plugin would be quite useful as well, for ex. when you want to update dependencies.
-
-High level summary
-------------------
-
-The bigpetstore project exemplifies the hadoop ecosystem for newcomers, and also for benchmarking and
-comparing functional space of tools.
-
-The end goal is to run many different implementations of each phase
-using different tools, thus exemplifying overlap of tools in the hadoop ecosystem, and allowing people to benchmark/compare tools
-using a common framework and easily understood use case
-
-
-How it works (To Do)
---------------------
-
-### Phase 1: Generating pet store data:
-
-The first step is to generate a raw data set.  This is done by the "GeneratePetStoreTransactionsInputFormat":
-
-The first MapReduce job in the pipeline runs a simple job which takes this input format and forwards
-its output.  The result is a list of "transactions".  Each transaction is a tuple of the format
-
-  *{state,name,date,price,product}.*
-
-### Phase 2: Processing the data
-
-The next phase of the application processes the data to create basic aggregations. For example with both pig and hive these could easily include
-
-- *Number of transactions by state* or
-- *Most valuable customer by state* or
-- *Most popular items by state*
-
-
-### Phase 3: Clustering the states by all fields
-
-  Now, say we want to cluster the states, so as to put different states into different buying categories
-  for our marketing team to deal with differently.
-
-### Phase 4: Visualizing the Data in D3.
-
- - try it [on the gh-pages branch](http://jayunit100.github.io/bigpetstore/)
-
-
-Running on a hadoop cluster
----------------------------
-
-*Note:* For running the code using the `hadoop jar` command instead of the `gradle` tasks, you will need to set the classpath appropriately. The discussion after [this comment][jira-mahout] in JIRA could also be useful apart from these instructions.
-
-### Build the fat-jar
-
-We are going to use a fat-jar in order to avoid specifying the entire classpath ourselves.
-
-The fat-jar is required when we are running the application on a hadoop cluster. The other way would be to specify all the dependencies (including the transitive ones) manually while running the hadoop job. Fat-jars make it easier to bundle almost all the dependencies inside the distribution jar itself.
-
-```
-gradle clean shadowJar -Pfor-cluster
-```
-
-This command will build the fat-jar with all the dependencies bundled in except the hadoop, mahout and pig dependencies, which we'll specify using `-libjars` option while running the hadoop job. These dependencies are excluded to avoid conflicts with the jars provided by hadoop itself.
-
-The generated jar will be inside the `build/libs` dir, with name like `BigPetStore-x.x.x-SNAPSHOT-all.jar`. For the remainig discussion I'll refer to this jar by `bps.jar`.
-
-### Get the mahout and pig jars
-
-You'll need both mahout and pig jars with the hadoop classes excluded. Commonly, you can find both of these in their respective distros. The required pig jar is generally named like `pig-x.x.x-withouthadoop.jar` and the mahout jar would be named like `mahout-core-job.jar`. If you want, you can build those yourself by following the instructions in [this JIRA comment][jira-mahout]]. For the remaining discussion, I am going to refer to these two jars by `pig-withouthadoop.jar` and `mahout-c [...]
-
-### Setup the classpath for hadoop nodes in the cluster
-
-```
-export JARS="/usr/lib/pig/pig-withouthadoop.jar,/usr/lib/mahout/mahout-core-job.jar"
-```
-
-We also need these jars to be present on the client side to kick-off the jobs. Reusing the `JARS` variable to put the same jars on the client classpath.
-
-```
-export HADOOP_CLASSPATH=`echo $JARS | sed s/,/:/g`
-```
-
-### Generate the data
-
-```
-hadoop jar bps.jar org.apache.bigtop.bigpetstore.generator.BPSGenerator 1000000 bigpetstore/gen
-```
-
-### Clean with pig
-
-```
-hadoop jar bps.jar org.apache.bigtop.bigpetstore.etl.PigCSVCleaner -libjars $JARS bigpetstore/gen/ bigpetstore/ custom_pigscript.pig
-```
-
-### Analyze and generate recommendations with mahout
-
-```
-hadoop jar bps.jar org.apache.bigtop.bigpetstore.recommend.ItemRecommender -libjars $JARS  bigpetstore/pig/Mahout bigpetstore/Mahout/AlsFactorization bigpetstore/Mahout/AlsRecommendations
-```
-
-
-... (will add more steps as we add more phases to the workflow) ...
-
-
-Example of running in EMR
---------------------------
-- Put the jar in s3.  Right now there is a copy of it at the url below.
-
-- Download the elastic-mapreduce ruby shell script.
-create your "credentials.json" file.
-
-Now run this to generate 1,000,000 pet store transactions:
-
-./elastic-mapreduce --create --jar s3://bigpetstore/bigpetstore.jar \
---main-class org.apache.bigtop.bigpetstore.generator.BPSGenerator \
---num-instances 10  \
---arg 1000000 \
---arg s3://bigpetstore/data/generated \
---hadoop-version "2.2.0"  \
---master-instance-type m1.medium \
---slave-instance-type m1.medium
-
-...Now lets clean the data with pig...
-
-Replace the above "main-class", and "--arg" options with
---main-class org.apache.bigtop.bigpetstore.etl.PigCSVCleaner
---arg s3://bigpetstore/data/generated
---arg s3://bigpetstore/data/pig_out
-(optional, you can send a script referencing the cleaned $input path to do some
-custom analytics, see the BPS_Analytics.pig script and companion
-http://jayunit100.github.io/bigpetstore) as an example).
---arg s3://path_to_custom_analytics_script.pig
-
-(note about pig: We support custom pig scripts.... for EMR, custom pig scripts will need to point to a
-local path, so youll have to put that script on the machine as part
-of EMR setup w/ a custom script).
-
-...
-
-And so on.
-
-
-[jira-mahout]: https://issues.apache.org/jira/browse/BIGTOP-1272?focusedCommentId=14076023&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1407602
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/arch.dot b/bigtop-bigpetstore/bigpetstore-mapreduce/arch.dot
deleted file mode 100644
index 5192806..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/arch.dot
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
-* Licensed to the Apache Software Foundation (ASF) under one or more
-* contributor license agreements.  See the NOTICE file distributed with
-* this work for additional information regarding copyright ownership.
-* The ASF licenses this file to You under the Apache License, Version 2.0
-* (the "License"); you may not use this file except in compliance with
-* the License.  You may obtain a copy of the License at
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-digraph bigpetstore {
-
-   node [shape=record];
-
-
-   BPSAnalytics [label="BPSAnalytics.pig" ,style="rounded, filled", shape=diamond];
-   CUSTOMER_PAGE [label="CUSTOMER_PAGE|json|CUSTOMER_PAGE/part*"];
-   DIRTY_CSV [label="DIRTY_CSV|fname   lname -prod , price ,prod,..|generated/part*"];
-   CSV [label="CSV|fname,lname,prod,price,date,xcoord,ycoord,...|cleaned/part*"];
-   MAHOUT_VIEW_INPUT [label="MAHOUT_VIEW  |  (user-id) 10001  (product-id) 203  (implicit-rating) 1 |  cleaned/Mahout/part*" ];
-   MAHOUT_ALS [label="Parallel ALS Recommender output  | (user-id) 10001  [(product-id) 201: (recommendation-strength 0-1)0.546] | Mahout/AlsRecommendations/part*" ];
-
-   Generate -> DIRTY_CSV [label="hadoop jar bigpetstore.jar org.apache.bigtop.bigpetstore.generator.BPSGenerator 100 bps/generated/"] ;
-   DIRTY_CSV -> pig [label="hadoop jar bigpetstore.jar org.apache.bigtop.bigpetstore.etl.PigCSVCleaner bps/generated/ bps/cleaned/ "];
-
-   pig -> CSV [label="pig query to clean up generated transaction records"];
-   pig -> MAHOUT_VIEW_INPUT [label="pig query to produce mahout input format"];
-
-   MAHOUT_VIEW_INPUT -> ParallelALSFactorizationJob [label="hadoop jar bigpetstore.jar org.apache.bigtop.bigpetstore.recommend.ItemRecommender cleaned/Mahout Mahout/AlsFactorization Mahout/AlsRecommendations"];
-   ParallelALSFactorizationJob -> "Mahout RecommenderJob"
-   "Mahout RecommenderJob" -> MAHOUT_ALS
-
-   CSV -> BPSAnalytics;
-   BPSAnalytics  -> pig_job2;
-   pig_job2  -> CUSTOMER_PAGE [label=""];
-}
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/build.gradle b/bigtop-bigpetstore/bigpetstore-mapreduce/build.gradle
deleted file mode 100644
index 24c9a01..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/build.gradle
+++ /dev/null
@@ -1,305 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-apply plugin: "java"
-apply plugin: "eclipse"
-// TODO add idea module config.
-apply plugin: "idea"
-apply plugin: "scala"
-apply plugin: 'com.github.johnrengelman.shadow'
-
-buildscript {
-  repositories { jcenter() }
-  dependencies {
-    classpath 'com.github.jengelman.gradle.plugins:shadow:1.0.2'
-  }
-}
-
-// Read the groupId and version properties from the "parent" bigtop project.
-// It would be better if there was some better way of doing this. Howvever,
-// at this point, we have to do this (or some variation thereof) since gradle
-// projects can't have maven projects as parents (AFAIK. If there is a way to do it,
-// it doesn't seem to be well-documented).
-def setProjectProperties() {
-    Node xml = new XmlParser().parse("../../pom.xml")
-    group = xml.groupId.first().value().first()
-    version = xml.version.first().value().first()
-}
-
-setProjectProperties()
-description = """"""
-
-// We are using 1.7 as gradle can't play well when java 8 and scala are combined.
-// There is an open issue here: http://issues.gradle.org/browse/GRADLE-3023
-// There is talk of this being resolved in the next version of gradle. Till then,
-// we are stuck with java 7. But we do have scala if we want more syntactic sugar.
-sourceCompatibility = 1.7
-targetCompatibility = 1.7
-
-// Specify any additional project properties.
-ext {
-    slf4jVersion = "1.7.5"
-    guavaVersion = "15.0"
-    datanucleusVersion = "3.2.2"
-    datanucleusJpaVersion = "3.2.1"
-    bonecpVersion = "0.8.0.RELEASE"
-    derbyVersion = "10.10.1.1"
-
-    // from horton-works repo. They compile mahout-core against hadoop2.x. These
-    // mahout is compiled against 2.4.0
-    hadoopVersion = "2.4.0.2.1.2.0-402"
-    mahoutVersion = "0.9.0.2.1.2.0-402"
-}
-
-repositories {
-    mavenCentral()
-    maven {
-        url "http://repo.hortonworks.com/content/repositories/releases/"
-    }
-}
-
-tasks.withType(AbstractCompile) {
-    options.encoding = 'UTF-8'
-    options.compilerArgs << "-Xlint:all"
-}
-
-tasks.withType(ScalaCompile) {
-    // Enables incremental compilation.
-    // http://www.gradle.org/docs/current/userguide/userguide_single.html#N12F78
-    scalaCompileOptions.useAnt = false
-}
-
-tasks.withType(Test) {
-    testLogging {
-        // Uncomment this if you want to see the console output from the tests.
-        // showStandardStreams = true
-        events "passed", "skipped", "failed"
-        // show standard out and standard error of the test JVM(s) on the console
-        //showStandardStreams = true
-    }
-}
-
-test {
-    exclude "**/*TestPig.java", "**/*TestHiveEmbedded.java", "**/*TestCrunch.java", "**/*TestPetStoreTransactionGeneratorJob.java"
-}
-
-// Create a separate source-set for the src/integrationTest set of classes. The convention here
-// is that gradle will look for a directory with the same name as that of the specified source-set
-// under the 'src' directory. So, in this case, it will look for a directory named 'src/integrationTest'
-// since the name of the source-set is 'integrationTest'
-sourceSets {
-    main {
-        java.srcDirs = [];
-        scala.srcDirs = ["src/main/scala", "src/main/java"]
-    }
-    // The main and test source-sets are configured by both java and scala plugins. They contain
-    // all the src/main and src/test classes. The following statements make all of those classes
-    // available on the classpath for the integration-tests, for both java and scala.
-    integrationTest {
-        java {
-            compileClasspath += main.output + test.output
-            runtimeClasspath += main.output + test.output
-        }
-        scala {
-            compileClasspath += main.output + test.output
-            runtimeClasspath += main.output + test.output
-        }
-    }
-}
-
-// Creating a source-set automatically add a couple of corresponding configurations (when java/scala
-// plugins are applied). The convention for these configurations is <sourceSetName>Compile and
-// <sourceSetName>Runtime. The following statements declare that all the dependencies from the
-// testCompile configuration will now be available for integrationTestCompile, and all the
-// dependencies (and other configuration that we might have provided) for testRuntime will be
-// available for integrationTestRuntime. For ex. the testCompile configuration has a dependency on
-// jUnit and scalatest. This makes them available for the integration tests as well.
-configurations {
-    integrationTestCompile {
-        extendsFrom testCompile
-    }
-
-    integrationTestRuntime {
-        extendsFrom integrationTestCompile, testRuntime
-    }
-}
-
-// To see the API that is being used here, consult the following docs
-// http://www.gradle.org/docs/current/dsl/org.gradle.api.artifacts.ResolutionStrategy.html
-def updateDependencyVersion(dependencyDetails, dependencyString) {
-    def parts = dependencyString.split(':')
-    def group = parts[0]
-    def name = parts[1]
-    def version = parts[2]
-    if (dependencyDetails.requested.group == group
-            && dependencyDetails.requested.name == name) {
-        dependencyDetails.useVersion version
-    }
-}
-
-def setupPigIntegrationTestDependencyVersions(dependencyResolveDetails) {
-    // This is the way we override the dependencies.
-    updateDependencyVersion dependencyResolveDetails, "joda-time:joda-time:2.2"
-}
-
-def setupCrunchIntegrationTestDependencyVersions(dependencyResolveDetails) {
-    // Specify any dependencies that you want to override for crunch integration tests.
-}
-
-def setupMahoutIntegrationTestDependencyVersions(dependencyResolveDetails) {
-    // Specify any dependencies that you want to override for mahout integration tests.
-}
-
-
-task integrationTest(type: Test, dependsOn: test) {
-
-    testClassesDir = sourceSets.integrationTest.output.classesDir
-    classpath = sourceSets.integrationTest.runtimeClasspath
-
-    if(!project.hasProperty('ITProfile')) {
-        // skip integration-tests if no profile has been specified.
-        integrationTest.onlyIf { false }
-        return;
-    }
-
-    def patternsToInclude
-    def dependencyConfigClosure
-    def skipDependencyUpdates = false
-    // Select the pattern for test classes that should be executed, and the dependency
-    // configuration function to be called based on the profile name specified at the command line.
-    switch (project.ITProfile) {
-        case "pig":
-            patternsToInclude = "*PigIT*"
-            dependencyConfigClosure = { setupPigIntegrationTestDependencyVersions(it) }
-
-            //In pig integration tests, the custom pig script seems to have high
-            //memory requirements.
-            minHeapSize = "1000m"
-            maxHeapSize = "4000m"
-            break
-        case "crunch":
-            patternsToInclude = "*CrunchIT*"
-            dependencyConfigClosure = { setupCrunchIntegrationTestDependencyVersions(it) }
-            break
-        case "mahout":
-            patternsToInclude = "*MahoutIT*"
-            dependencyConfigClosure = { setupMahoutIntegrationTestDependencyVersions(it) }
-            break
-        // skip integration-tests if the passed in profile-name is not valid
-        default: integrationTest.onlyIf { false }; return
-    }
-
-
-    filter { includeTestsMatching patternsToInclude }
-
-    // This is the standard way gradle allows overriding each specific dependency.
-    // see: http://www.gradle.org/docs/current/dsl/org.gradle.api.artifacts.ResolutionStrategy.html
-    project.configurations.all {
-        resolutionStrategy {
-            eachDependency {
-                dependencyConfigClosure(it)
-            }
-        }
-    }
-}
-
-dependencies {
-    compile "org.kohsuke:graphviz-api:1.0"
-    compile "org.apache.crunch:crunch-core:0.9.0-hadoop2"
-    compile "com.jolbox:bonecp:${project.bonecpVersion}"
-    compile "org.apache.derby:derby:${project.derbyVersion}"
-    compile "com.google.guava:guava:${project.guavaVersion}"
-    compile "commons-lang:commons-lang:2.6"
-    compile "joda-time:joda-time:2.3"
-    compile "org.apache.commons:commons-lang3:3.1"
-    compile "com.google.protobuf:protobuf-java:2.5.0"
-    compile "commons-logging:commons-logging:1.1.3"
-    compile "com.thoughtworks.xstream:xstream:+"
-    compile "org.apache.lucene:lucene-core:+"
-    compile "org.apache.lucene:lucene-analyzers-common:+"
-    compile "org.apache.solr:solr-commons-csv:3.5.0"
-
-    compile group: "org.apache.pig", name: "pig", version: "0.12.0", classifier:"h2"
-    compile "dk.brics.automaton:automaton:1.11-8"
-    compile "org.slf4j:slf4j-api:${project.slf4jVersion}"
-    compile "log4j:log4j:1.2.12"
-    compile "org.slf4j:slf4j-log4j12:${project.slf4jVersion}"
-    compile "org.datanucleus:datanucleus-core:${project.datanucleusVersion}"
-    compile "org.datanucleus:datanucleus-rdbms:${project.datanucleusJpaVersion}"
-    compile "org.datanucleus:datanucleus-api-jdo:${project.datanucleusJpaVersion}"
-    compile "org.datanucleus:datanucleus-accessplatform-jdo-rdbms:${project.datanucleusJpaVersion}"
-    compile group: "org.apache.mrunit", name: "mrunit", version: "1.0.0", classifier:"hadoop2"
-
-    compile "org.jfairy:jfairy:0.2.4"
-
-    // from horton-works repo. They compile mahout-core against hadoop2.x
-    compile "org.apache.hadoop:hadoop-client:${hadoopVersion}"
-    compile "org.apache.mahout:mahout-core:${mahoutVersion}"
-
-    compile 'org.scala-lang:scala-library:2.11.0'
-
-    testCompile "junit:junit:4.11"
-    testCompile "org.hamcrest:hamcrest-all:1.3"
-    testCompile "org.scalatest:scalatest_2.11:2.1.7"
-}
-
-configurations {
-    hadoopClusterRuntime {
-	    // extendsFrom integrationTestRuntime
-	    if(project.hasProperty('for-cluster')) {
-		    excludeRules += [getGroup: { 'org.apache.crunch' }, getModule: { 'crunch-core' } ] as ExcludeRule
-		    excludeRules += [getGroup: { 'org.apache.pig' }, getModule: { 'pig' } ] as ExcludeRule
-		    excludeRules += [getGroup: { 'org.apache.mahout' }, getModule: { 'mahout-core' } ] as ExcludeRule
-		    excludeRules += [getGroup: { 'org.apache.hadoop' }, getModule: { 'hadoop-client' } ] as ExcludeRule
-		}
-    }
-}
-
-task listJars << {
-    configurations.shadow.each { println it.name }
-}
-
-def copyDependencyJarsForHadoopCluster() {
-    copy {
-        from configurations.hadoopClusterRuntime
-        into 'build/libs'
-    }
-}
-
-build {
-    doLast {
-        copyDependencyJarsForHadoopCluster()
-    }
-}
-
-eclipse {
-    classpath {
-        // Add the dependencies and the src dirs for the integrationTest source-set to the
-        // .classpath file that will be generated by the eclipse plugin.
-        plusConfigurations += [configurations.integrationTestCompile]
-        // Comment out the following two lines if you want to generate an eclipse project quickly.
-        downloadSources = true
-        downloadJavadoc = false
-    }
-}
-
-// shadowJar can merge files which have the same path but it differs only in case into a single jar,
-// but it causes a problem on some case-insensitive platforms such as Mac OS X.
-// The following function removes either of the duplicated path.
-shadowJar {
-    exclude 'META-INF/LICENSE'
-}
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/pom.xml b/bigtop-bigpetstore/bigpetstore-mapreduce/pom.xml
deleted file mode 100644
index aa6a627..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/pom.xml
+++ /dev/null
@@ -1,584 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-	Licensed to the Apache Software Foundation (ASF) under one or more
-	contributor license agreements. See the NOTICE file distributed with
-	this work for additional information regarding copyright ownership.
-	The ASF licenses this file to You under the Apache License, Version 2.0
-	(the "License"); you may not use this file except in compliance with
-	the License. You may obtain a copy of the License at
-
-	http://www.apache.org/licenses/LICENSE-2.0
-
-	Unless required by applicable law or agreed to in writing, software
-	distributed under the License is distributed on an "AS IS" BASIS,
-	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	See the License for the specific language governing permissions and
-	limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<groupId>org.apache.bigtop</groupId>
-	<artifactId>BigPetStore</artifactId>
-        <version>1.5.0-SNAPSHOT</version>
-	<properties>
-		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-		<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
-		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-		<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
-		<slf4j.version>1.7.5</slf4j.version>
-		<guava.version>15.0</guava.version>
-		<hadoop.version>2.2.0</hadoop.version>
-		<hive.version>0.12.0</hive.version>
-		<datanucleus.version>3.2.2</datanucleus.version>
-		<datanucleus.jpa.version>3.2.1</datanucleus.jpa.version>
-		<bonecp.version>1.0.0-SNAPSHOT.RELEASE</bonecp.version>
-		<derby.version>10.10.1.1</derby.version>
-		<plugin.surefire.version>2.17</plugin.surefire.version>
-	</properties>
-
-	<dependencies>
-		<dependency>
-			<groupId>org.kohsuke</groupId>
-			<artifactId>graphviz-api</artifactId>
-			<version>1.0</version>
-		</dependency>
-
-		<dependency>
-			<groupId>org.apache.crunch</groupId>
-			<artifactId>crunch-core</artifactId>
-			<version>0.9.0-hadoop2</version>
-		</dependency>
-
-		<!-- misc deps -->
-		<dependency>
-			<groupId>com.jolbox</groupId>
-			<artifactId>bonecp</artifactId>
-			<version>${bonecp.version}</version>
-		</dependency>
-
-		<dependency>
-			<groupId>org.apache.derby</groupId>
-			<artifactId>derby</artifactId>
-			<version>${derby.version}</version>
-		</dependency>
-
-		<dependency>
-			<groupId>com.google.guava</groupId>
-			<artifactId>guava</artifactId>
-			<version>${guava.version}</version>
-		</dependency>
-
-		<!-- From pig profile -->
-		<dependency>
-			<groupId>commons-lang</groupId>
-			<artifactId>commons-lang</artifactId>
-			<version>2.6</version>
-		</dependency>
-
-		<dependency>
-			<groupId>joda-time</groupId>
-			<artifactId>joda-time</artifactId>
-			<version>2.3</version>
-		</dependency>
-		<!-- end pig profile -->
-		<!-- From hive profile -->
-		<dependency>
-			<groupId>org.apache.commons</groupId>
-			<artifactId>commons-lang3</artifactId>
-			<version>3.1</version>
-		</dependency>
-		<!-- end hive profile -->
-		<!-- From Crunch profile -->
-		<dependency>
-			<groupId>com.google.protobuf</groupId>
-			<artifactId>protobuf-java</artifactId>
-			<version>2.5.0</version>
-		</dependency>
-		<!-- end crunch profile -->
-		<!-- From Mahout profile -->
-		<dependency>
-			<groupId>commons-logging</groupId>
-			<artifactId>commons-logging</artifactId>
-			<version>1.1.3</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.mahout</groupId>
-			<artifactId>mahout-math</artifactId>
-			<version>0.9</version>
-		</dependency>
-		<dependency>
-			<groupId>com.thoughtworks.xstream</groupId>
-			<artifactId>xstream</artifactId>
-			<version>LATEST</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.lucene</groupId>
-			<artifactId>lucene-core</artifactId>
-			<version>LATEST</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.lucene</groupId>
-			<artifactId>lucene-analyzers-common</artifactId>
-			<version>LATEST</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.mahout.commons</groupId>
-			<artifactId>commons-cli</artifactId>
-			<version>LATEST</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.commons</groupId>
-			<artifactId>commons-math3</artifactId>
-			<version>LATEST</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.solr</groupId>
-			<artifactId>solr-commons-csv</artifactId>
-			<version>3.5.0</version>
-		</dependency>
-		<!-- end Mahout profile -->
-
-		<!-- TODO ask question about this comment -->
-		<!-- We keep this at top level so that mvn eclipse:eclipse creates a nice
-			tidy project, but its a little messy. later we'll create a profile for eclipse
-			and move this (and other deps) into profiles as needed. Important: Remove
-			this dependency when running hive integration tests... -->
-		<dependency>
-			<groupId>org.apache.hadoop</groupId>
-			<artifactId>hadoop-client</artifactId>
-			<version>${hadoop.version}</version>
-		</dependency>
-		<!-- TODO ask question about this comment -->
-		<!-- mahout deps : may need to turn these on/off when testing mahout locally -->
-		<!-- For testing on my machine, I created a bigpetstore mahout jar which
-			is compiled for 2.2.0 . Or substitute this with the standard apache mahout-core
-			but not sure if it will work. -->
-		<dependency>
-			<groupId>org.apache.mahout</groupId>
-			<artifactId>mahout-core</artifactId>
-			<version>0.8</version>
-		</dependency>
-		<!-- pig deps -->
-		<dependency>
-			<groupId>org.apache.pig</groupId>
-			<artifactId>pig</artifactId>
-			<classifier>h2</classifier>
-			<version>0.12.0</version>
-		</dependency>
-
-		<!--logging -->
-
-		<dependency>
-			<groupId>org.slf4j</groupId>
-			<artifactId>slf4j-api</artifactId>
-			<version>${slf4j.version}</version>
-		</dependency>
-		<dependency>
-			<groupId>log4j</groupId>
-			<artifactId>log4j</artifactId>
-			<version>1.2.12</version>
-		</dependency>
-		<dependency>
-			<groupId>org.slf4j</groupId>
-			<artifactId>slf4j-log4j12</artifactId>
-			<version>${slf4j.version}</version>
-		</dependency>
-		<!-- hive -->
-		<dependency>
-			<groupId>org.apache.hive</groupId>
-			<artifactId>hive-common</artifactId>
-			<version>${hive.version}</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.hive</groupId>
-			<artifactId>hive-serde</artifactId>
-			<version>${hive.version}</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.hive</groupId>
-			<artifactId>hive-jdbc</artifactId>
-			<version>${hive.version}</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.hive</groupId>
-			<artifactId>hive-contrib</artifactId>
-			<version>${hive.version}</version>
-		</dependency>
-
-		<!-- datanucleus -->
-		<dependency>
-			<groupId>org.datanucleus</groupId>
-			<artifactId>datanucleus-core</artifactId>
-			<version>${datanucleus.version}</version>
-		</dependency>
-
-		<dependency>
-			<groupId>org.datanucleus</groupId>
-			<artifactId>datanucleus-rdbms</artifactId>
-			<version>${datanucleus.jpa.version}</version>
-		</dependency>
-
-		<dependency>
-			<groupId>org.datanucleus</groupId>
-			<artifactId>datanucleus-api-jdo</artifactId>
-			<version>${datanucleus.jpa.version}</version>
-		</dependency>
-
-		<!-- TODO eliminate this pom dependency -->
-		<dependency>
-			<groupId>org.datanucleus</groupId>
-			<artifactId>datanucleus-accessplatform-jdo-rdbms</artifactId>
-			<version>${datanucleus.jpa.version}</version>
-			<type>pom</type>
-		</dependency>
-
-		<!-- Unit test artifacts -->
-		<dependency>
-			<groupId>junit</groupId>
-			<artifactId>junit</artifactId>
-			<version>4.11</version>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.hamcrest</groupId>
-			<artifactId>hamcrest-all</artifactId>
-			<version>1.3</version>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.mrunit</groupId>
-			<artifactId>mrunit</artifactId>
-			<version>1.0.0</version>
-			<classifier>hadoop2</classifier>
-		</dependency>
-	</dependencies>
-
-	<build>
-		<extensions>
-			<extension>
-				<groupId>org.springframework.build.aws</groupId>
-				<artifactId>org.springframework.build.aws.maven</artifactId>
-				<version>3.0.0.RELEASE</version>
-			</extension>
-		</extensions>
-		<finalName>bigpetstore-${project.version}</finalName>
-		<plugins>
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-release-plugin</artifactId>
-				<version>2.5</version>
-			</plugin>
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-eclipse-plugin</artifactId>
-				<version>2.9</version>
-				<configuration>
-					<downloadSources>true</downloadSources>
-					<downloadJavadocs>true</downloadJavadocs>
-				</configuration>
-			</plugin>
-
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-compiler-plugin</artifactId>
-				<version>${maven-compiler-plugin.version}</version>
-				<configuration>
-					<source>1.8</source>
-					<target>1.8</target>
-				</configuration>
-			</plugin>
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-jar-plugin</artifactId>
-				<version>2.4</version>
-				<configuration>
-					<outputDirectory>${basedir}/target</outputDirectory>
-				</configuration>
-			</plugin>
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-surefire-plugin</artifactId>
-				<version>${plugin.surefire.version}</version>
-				<configuration>
-					<excludes>
-						<exclude>**/*TestPig.java</exclude>
-						<exclude>**/*TestHiveEmbedded.java</exclude>
-						<exclude>**/*TestCrunch.java</exclude>
-					</excludes>
-				</configuration>
-			</plugin>
-		</plugins>
-	</build>
-
-	<profiles>
-		<profile>
-			<id>pig</id>
-			<build>
-				<plugins>
-					<plugin>
-						<groupId>org.apache.maven.plugins</groupId>
-						<artifactId>maven-surefire-plugin</artifactId>
-						<version>${plugin.surefire.version}</version>
-						<configuration>
-							<excludes>
-								<exclude>**/*TestPig.java</exclude>
-								<exclude>**/*TestHiveEmbedded.java</exclude>
-								<exclude>**/*TestCrunch.java</exclude>
-								<exclude>**/*TestPetStoreTransactionGeneratorJob.java</exclude>
-							</excludes>
-
-						</configuration>
-					</plugin>
-					<plugin>
-						<groupId>org.codehaus.mojo</groupId>
-						<artifactId>build-helper-maven-plugin</artifactId>
-						<version>1.5</version>
-						<executions>
-							<execution>
-								<id>add-test-source</id>
-								<phase>generate-test-sources</phase>
-								<goals>
-									<goal>add-test-source</goal>
-								</goals>
-								<configuration>
-									<sources>
-										<source>src/integration/java</source>
-									</sources>
-								</configuration>
-							</execution>
-						</executions>
-					</plugin>
-					<plugin>
-						<groupId>org.apache.maven.plugins</groupId>
-						<artifactId>maven-failsafe-plugin</artifactId>
-						<version>2.12</version>
-
-						<configuration>
-							<argLine>-Xmx1g</argLine>
-							<excludes>
-								<exclude>**/*BigPetStoreMahoutIT.java</exclude>
-								<exclude>**/*BigPetStoreHiveIT.java</exclude>
-								<exclude>**/*BigPetStoreCrunchIT.java</exclude>
-							</excludes>
-						</configuration>
-						<executions>
-							<!-- States that both integration-test and verify goals of the Failsafe
-								Maven plugin are executed. -->
-							<execution>
-								<id>integration-tests</id>
-								<goals>
-									<goal>integration-test</goal>
-									<goal>verify</goal>
-								</goals>
-							</execution>
-						</executions>
-					</plugin>
-				</plugins>
-			</build>
-		</profile>
-
-		<profile>
-			<id>hive</id>
-			<build>
-				<plugins>
-					<plugin>
-						<groupId>org.apache.maven.plugins</groupId>
-						<artifactId>maven-surefire-plugin</artifactId>
-						<version>${plugin.surefire.version}</version>
-						<configuration>
-							<excludes>
-								<exclude>**/*TestPig.java</exclude>
-								<exclude>**/*TestHiveEmbedded.java</exclude>
-								<exclude>**/*TestCrunch.java</exclude>
-								<exclude>**/*TestPetStoreTransactionGeneratorJob.java</exclude>
-							</excludes>
-						</configuration>
-					</plugin>
-					<plugin>
-						<groupId>org.codehaus.mojo</groupId>
-						<artifactId>build-helper-maven-plugin</artifactId>
-						<version>1.5</version>
-						<executions>
-							<execution>
-								<id>add-test-source</id>
-								<phase>generate-test-sources</phase>
-								<goals>
-									<goal>add-test-source</goal>
-								</goals>
-								<configuration>
-									<sources>
-										<source>src/integration/java</source>
-									</sources>
-								</configuration>
-							</execution>
-						</executions>
-					</plugin>
-					<plugin>
-						<groupId>org.apache.maven.plugins</groupId>
-						<artifactId>maven-failsafe-plugin</artifactId>
-						<version>2.12</version>
-						<configuration>
-							<excludes>
-								<exclude>**/*BigPetStoreMahoutIT.java</exclude>
-								<exclude>**/*BigPetStorePigIT.java</exclude>
-								<exclude>**/*BigPetStoreCrunchIT.java</exclude>
-							</excludes>
-						</configuration>
-						<executions>
-							<!-- States that both integration-test and verify goals of the Failsafe
-								Maven plugin are executed. -->
-							<execution>
-								<id>integration-tests</id>
-								<goals>
-									<goal>integration-test</goal>
-									<goal>verify</goal>
-								</goals>
-							</execution>
-						</executions>
-					</plugin>
-				</plugins>
-			</build>
-			<dependencies>
-				<!-- hadoop -->
-				<!-- TODO is this version change required? Version 2.2.0 is provided
-					by hadoop-client dependency. Shouldn't we have the same versions for the
-					related dependencies? -->
-				<dependency>
-					<groupId>org.apache.hadoop</groupId>
-					<artifactId>hadoop-mapreduce-client-app</artifactId>
-					<version>2.3.0</version>
-				</dependency>
-			</dependencies>
-		</profile>
-		<profile>
-			<id>crunch</id>
-			<build>
-				<plugins>
-					<plugin>
-						<groupId>org.apache.maven.plugins</groupId>
-						<artifactId>maven-surefire-plugin</artifactId>
-						<version>${plugin.surefire.version}</version>
-						<configuration>
-							<excludes>
-								<exclude>**/*TestPig.java</exclude>
-								<exclude>**/*TestHiveEmbedded.java</exclude>
-								<exclude>**/*TestCrunch.java</exclude>
-								<exclude>**/*TestPetStoreTransactionGeneratorJob.java</exclude>
-							</excludes>
-						</configuration>
-					</plugin>
-					<plugin>
-						<groupId>org.codehaus.mojo</groupId>
-						<artifactId>build-helper-maven-plugin</artifactId>
-						<version>1.5</version>
-						<executions>
-							<execution>
-								<id>add-test-source</id>
-								<phase>generate-test-sources</phase>
-								<goals>
-									<goal>add-test-source</goal>
-								</goals>
-								<configuration>
-									<sources>
-										<source>src/integration/java</source>
-									</sources>
-								</configuration>
-							</execution>
-						</executions>
-					</plugin>
-					<plugin>
-						<groupId>org.apache.maven.plugins</groupId>
-						<artifactId>maven-failsafe-plugin</artifactId>
-						<version>2.12</version>
-						<configuration>
-							<excludes>
-								<exclude>**/*BigPetStorePigIT.java</exclude>
-								<exclude>**/*BigPetStoreHiveIT.java</exclude>
-								<exclude>**/*BigPetStoreMahoutIT.java</exclude>
-							</excludes>
-						</configuration>
-						<executions>
-							<!-- States that both integration-test and verify goals of the Failsafe
-								Maven plugin are executed. -->
-							<execution>
-								<id>integration-tests</id>
-								<goals>
-									<goal>integration-test</goal>
-									<goal>verify</goal>
-								</goals>
-							</execution>
-						</executions>
-					</plugin>
-				</plugins>
-			</build>
-		</profile>
-		<profile>
-			<id>mahout</id>
-			<!-- TODO this property is not being used anywhere. It's not even automatically
-				detectable. Remove? Or do something that the name suggests? -->
-			<properties>
-				<skip.unit.tests>true</skip.unit.tests>
-			</properties>
-			<build>
-				<plugins>
-					<plugin>
-						<groupId>org.apache.maven.plugins</groupId>
-						<artifactId>maven-surefire-plugin</artifactId>
-						<version>${plugin.surefire.version}</version>
-						<configuration>
-							<excludes>
-								<exclude>**/*TestPig.java</exclude>
-								<exclude>**/*TestHiveEmbedded.java</exclude>
-								<exclude>**/*TestCrunch.java</exclude>
-								<exclude>**/*TestPetStoreTransactionGeneratorJob.java</exclude>
-							</excludes>
-						</configuration>
-					</plugin>
-					<plugin>
-						<groupId>org.codehaus.mojo</groupId>
-						<artifactId>build-helper-maven-plugin</artifactId>
-						<version>1.5</version>
-						<executions>
-							<execution>
-								<id>add-test-source</id>
-								<phase>generate-test-sources</phase>
-								<goals>
-									<goal>add-test-source</goal>
-								</goals>
-								<configuration>
-									<sources>
-										<source>src/integration/java</source>
-									</sources>
-								</configuration>
-							</execution>
-						</executions>
-					</plugin>
-					<plugin>
-						<groupId>org.apache.maven.plugins</groupId>
-						<artifactId>maven-failsafe-plugin</artifactId>
-						<version>2.12</version>
-						<configuration>
-							<excludes>
-								<exclude>**/*BigPetStorePigIT.java</exclude>
-								<exclude>**/*BigPetStoreCrunchIT.java</exclude>
-								<exclude>**/*BigPetStoreHiveIT.java</exclude>
-							</excludes>
-						</configuration>
-						<executions>
-							<!-- States that both integration-test and verify goals of the Failsafe
-								Maven plugin are executed. -->
-							<execution>
-								<id>integration-tests</id>
-								<goals>
-									<goal>integration-test</goal>
-									<goal>verify</goal>
-								</goals>
-							</execution>
-						</executions>
-					</plugin>
-				</plugins>
-			</build>
-		</profile>
-	</profiles>
-</project>
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/settings.gradle b/bigtop-bigpetstore/bigpetstore-mapreduce/settings.gradle
deleted file mode 100644
index 53d74f2..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/settings.gradle
+++ /dev/null
@@ -1,18 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-rootProject.name = 'BigPetStore'
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/integrationTest/java/org/apache/bigtop/bigpetstore/BigPetStoreMahoutIT.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/integrationTest/java/org/apache/bigtop/bigpetstore/BigPetStoreMahoutIT.java
deleted file mode 100644
index 4791b2b..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/integrationTest/java/org/apache/bigtop/bigpetstore/BigPetStoreMahoutIT.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore;
-
-import static org.apache.bigtop.bigpetstore.ITUtils.createTestOutputPath;
-import static org.apache.bigtop.bigpetstore.ITUtils.setup;
-
-import java.util.regex.Pattern;
-
-import org.apache.bigtop.bigpetstore.recommend.ItemRecommender;
-import org.apache.bigtop.bigpetstore.util.BigPetStoreConstants.OUTPUTS.MahoutPaths;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.google.common.base.Predicate;
-
-public class BigPetStoreMahoutIT {
-
-  //the cleaned path has subdirs, one for tsv, one for mahout numerical format, and so on.
-  public static final Path INPUT_DIR_PATH =
-          new Path(ITUtils.BPS_TEST_PIG_CLEANED_ROOT, MahoutPaths.Mahout.name());
-  public static final String INPUT_DIR_PATH_STR = INPUT_DIR_PATH.toString();
-  private static final Path MAHOUT_OUTPUT_DIR = createTestOutputPath(MahoutPaths.Mahout.name());
-  private static final Path ALS_FACTORIZATION_OUTPUT_DIR =
-          createTestOutputPath(MahoutPaths.Mahout.name(), MahoutPaths.AlsFactorization.name());
-  private static final Path ALS_RECOMMENDATIONS_DIR =
-          createTestOutputPath(MahoutPaths.Mahout.name(), MahoutPaths.AlsRecommendations.name());
-
-  private ItemRecommender itemRecommender;
-
-  @Before
-  public void setupTest() throws Throwable {
-    setup();
-    try {
-      FileSystem fs = FileSystem.get(new Configuration());
-      fs.delete(MAHOUT_OUTPUT_DIR, true);
-      itemRecommender = new ItemRecommender(INPUT_DIR_PATH_STR, ALS_FACTORIZATION_OUTPUT_DIR.toString(),
-              ALS_RECOMMENDATIONS_DIR.toString());
-    } catch (Exception e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  private static final Predicate<String> TEST_OUTPUT_FORMAT = new Predicate<String>() {
-    private final Pattern p = Pattern.compile("^\\d+\\s\\[\\d+:\\d+\\.\\d+\\]$");
-    @Override
-    public boolean apply(String input) {
-      return p.matcher(input).matches();
-    }
-  };
-
-  @Test
-  public void testPetStorePipeline() throws Exception {
-    itemRecommender.recommend();
-    ITUtils.assertOutput(ALS_RECOMMENDATIONS_DIR, TEST_OUTPUT_FORMAT);
-  }
-
-}
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/integrationTest/java/org/apache/bigtop/bigpetstore/BigPetStorePigIT.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/integrationTest/java/org/apache/bigtop/bigpetstore/BigPetStorePigIT.java
deleted file mode 100644
index 3458abd..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/integrationTest/java/org/apache/bigtop/bigpetstore/BigPetStorePigIT.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore;
-
-import static org.apache.bigtop.bigpetstore.ITUtils.BPS_TEST_GENERATED;
-import static org.apache.bigtop.bigpetstore.ITUtils.BPS_TEST_PIG_CLEANED_ROOT;
-import static org.apache.bigtop.bigpetstore.ITUtils.fs;
-
-import java.io.File;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.bigtop.bigpetstore.etl.PigCSVCleaner;
-import org.apache.bigtop.bigpetstore.util.BigPetStoreConstants;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.pig.ExecType;
-import org.junit.Before;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Predicate;
-import com.google.common.collect.ImmutableMap;
-
-/**
- * This is the main integration test for pig. Like all BPS integration tests, it
- * is designed to simulate exactly what will happen on the actual cluster,
- * except with a small amount of records.
- *
- * In addition to cleaning the dataset, it also runs the BPS_analytics.pig
- * script which BigPetStore ships with.
- */
-public class BigPetStorePigIT {
-
-	final static Logger log = LoggerFactory.getLogger(BigPetStorePigIT.class);
-
-	/**
-	 * An extra unsupported code path that we have so people can do ad hoc
-	 * analytics on pig data after it is cleaned.
-	 */
-	public static final Path BPS_TEST_PIG_COUNT_PRODUCTS = fs
-			.makeQualified(new Path("bps_integration_",
-					BigPetStoreConstants.OUTPUTS.pig_ad_hoc_script.name() + "0"));
-
-	static final File PIG_SCRIPT = new File("BPS_analytics.pig");
-
-	static {
-		if (!PIG_SCRIPT.exists()) {
-			throw new RuntimeException("Couldnt find pig script at " + PIG_SCRIPT.getAbsolutePath());
-		}
-	}
-
-	@Before
-	public void setupTest() throws Throwable {
-		ITUtils.setup();
-		try {
-			FileSystem.get(new Configuration()).delete(BPS_TEST_PIG_CLEANED_ROOT, true);
-			FileSystem.get(new Configuration()).delete(BPS_TEST_PIG_COUNT_PRODUCTS, true);
-		} catch (Exception e) {
-			throw new RuntimeException(e);
-		}
-	}
-
-	static Map<Path, Predicate<String>> TESTS = ImmutableMap.of(
-		/** Test of the main output */
-		new Path(BPS_TEST_PIG_CLEANED_ROOT,"tsv"), ITUtils.VERIFICATION_PERDICATE,
-		// Example of how to count products after doing basic pig data cleanup
-		BPS_TEST_PIG_COUNT_PRODUCTS, ITUtils.VERIFICATION_PERDICATE,
-		// Test the output that is to be used as an input for Mahout.
-		BigPetStoreMahoutIT.INPUT_DIR_PATH, ITUtils.VERIFICATION_PERDICATE
-	);
-
-	@Test
-	public void testPetStoreCorePipeline() throws Exception {
-		//outputs multiple data sets, Mahout, tsv, ...	under BPS_TEST_PIG_CLEANED_ROOT
-		runPig(BPS_TEST_GENERATED, BPS_TEST_PIG_CLEANED_ROOT, PIG_SCRIPT);
-		for (Entry<Path, Predicate<String>> e : TESTS.entrySet()) {
-			ITUtils.assertOutput(e.getKey(), e.getValue());
-		}
-	}
-
-	private void runPig(Path input, Path output, File pigscript)
-			throws Exception {
-		new PigCSVCleaner(input, output, ExecType.LOCAL, pigscript);
-	}
-}
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/integrationTest/java/org/apache/bigtop/bigpetstore/ITUtils.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/integrationTest/java/org/apache/bigtop/bigpetstore/ITUtils.java
deleted file mode 100644
index 8f78751..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/integrationTest/java/org/apache/bigtop/bigpetstore/ITUtils.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore;
-
-import java.io.BufferedReader;
-import java.io.InputStreamReader;
-import java.net.InetAddress;
-import java.nio.charset.Charset;
-import java.util.List;
-
-import org.apache.bigtop.bigpetstore.generator.BPSGenerator;
-import org.apache.bigtop.bigpetstore.util.BigPetStoreConstants;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.Job;
-import org.junit.Assert;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Predicate;
-import com.google.common.io.Files;
-
-public class ITUtils {
-  public static final Path TEST_OUTPUT_DIR = new Path("bps_integration_");
-
-  public static Predicate<String> VERIFICATION_PERDICATE = new Predicate<String>() {
-    @Override
-    public boolean apply(String input) {
-      return true;
-    }
-  };
-
-	static final Logger log = LoggerFactory.getLogger(ITUtils.class);
-
-	static FileSystem fs;
-	static {
-		try {
-			fs = FileSystem.getLocal(new Configuration());
-		} catch (Throwable e) {
-			String cpath = (String) System.getProperties().get("java.class.path");
-			String msg = "";
-			for (String cp : cpath.split(":")) {
-				if (cp.contains("hadoop")) {
-					msg += cp.replaceAll("hadoop", "**HADOOP**") + "\n";
-				}
-			}
-			throw new RuntimeException("Major error:  Probably issue.   "
-			        + "Check hadoop version?  " + e.getMessage()
-			        + " .... check these classpath elements:" + msg);
-		}
-	}
-
-	public static final Path BPS_TEST_GENERATED =
-	        createTestOutputPath(BigPetStoreConstants.OUTPUTS.generated.name());
-
-	//there will be a tsv file under here...
-	public static final Path BPS_TEST_PIG_CLEANED_ROOT =
-	        createTestOutputPath (BigPetStoreConstants.OUTPUTS.cleaned.name());
-	public static Path createTestOutputPath(String... pathParts) {
-	  Path path = TEST_OUTPUT_DIR;
-	  for(String pathPart: pathParts) {
-	    path = new Path(path, pathPart);
-	  }
-	  return path;
-	}
-
-	/**
-	 * Some simple checks to make sure that unit tests in local FS. these arent
-	 * designed to be run against a distribtued system.
-	 */
-	public static void checkConf(Configuration conf) throws Exception {
-		if (conf.get("mapreduce.jobtracker.address") == null) {
-			log.warn("Missing mapreduce.jobtracker.address???????!!!! " + "This can be the case in hive tests which use special "
-					+ "configurations, but we should fix it sometime.");
-			return;
-		}
-		if (!conf.get("mapreduce.jobtracker.address").equals("local")) {
-			throw new RuntimeException("ERROR: bad conf : " + "mapreduce.jobtracker.address");
-		}
-		if (!conf.get("fs.AbstractFileSystem.file.impl").contains("Local")) {
-			throw new RuntimeException("ERROR: bad conf : " + "mapreduce.jobtracker.address");
-		}
-		try {
-			InetAddress addr = java.net.InetAddress.getLocalHost();
-			System.out.println("Localhost = hn=" + addr.getHostName() + " / ha=" + addr.getHostAddress());
-		} catch (Throwable e) {
-			throw new RuntimeException(" ERROR : Hadoop wont work at all  on this machine yet"
-					+ "...I can't get / resolve localhost ! Check java version/ " + "/etc/hosts / DNS or other networking related issues on your box"
-					+ e.getMessage());
-		}
-	}
-
-	/**
-	 * Creates a generated input data set in
-	 *
-	 * test_data_directory/generated. i.e.
-	 * test_data_directory/generated/part-r-00000
-	 */
-	public static void setup() throws Throwable {
-		Configuration conf = new Configuration();
-
-		// debugging for Jeff and others in local fs that won't build
-		checkConf(conf);
-
-		conf.setInt(BPSGenerator.props.bigpetstore_records.name(), BPSGenerator.DEFAULT_NUM_RECORDS);
-
-		if (FileSystem.getLocal(conf).exists(BPS_TEST_GENERATED)) {
-			return;
-		}
-
-		Job createInput = BPSGenerator.getCreateTransactionRecordsJob(BPS_TEST_GENERATED, conf);
-		createInput.waitForCompletion(true);
-
-		Path outputfile = new Path(BPS_TEST_GENERATED, "part-r-00000");
-		List<String> lines = Files.readLines(FileSystem.getLocal(conf).pathToFile(outputfile), Charset.defaultCharset());
-		log.info("output : " + FileSystem.getLocal(conf).pathToFile(outputfile));
-		for (String l : lines) {
-			System.out.println(l);
-		}
-	}
-
-
-	// A functions that logs the output file as a verification test
-	public static void assertOutput(Path base, Predicate<String> validator) throws Exception {
-	  FileSystem fs = FileSystem.getLocal(new Configuration());
-
-	  FileStatus[] files = fs.listStatus(base);
-	  // print out all the files.
-	  for (FileStatus stat : files) {
-	    System.out.println(stat.getPath() + "  " + stat.getLen());
-	  }
-
-	  /**
-	   * Support map OR reduce outputs
-	   */
-	  Path partm = new Path(base, "part-m-00000");
-	  Path partr = new Path(base, "part-r-00000");
-	  Path p = fs.exists(partm) ? partm : partr;
-
-	  /**
-	   * Now we read through the file and validate its contents.
-	   */
-	  BufferedReader r = new BufferedReader(new InputStreamReader(fs.open(p)));
-
-	  // line:{"product":"big chew toy","count":3}
-	  while (r.ready()) {
-	    String line = r.readLine();
-	    log.info("line:" + line);
-	    // System.out.println("line:"+line);
-	    Assert.assertTrue("validationg line : " + line, validator.apply(line));
-	  }
-	}
-
-}
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/contract/PetStoreStatistics.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/contract/PetStoreStatistics.java
deleted file mode 100755
index 158f875..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/contract/PetStoreStatistics.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.bigtop.bigpetstore.contract;
-
-import java.util.Map;
-
-/**
- * This is the contract for the web site. This object is created by each ETL
- * tool : Summary stats.
- */
-public abstract class PetStoreStatistics {
-
-    public abstract Map<String, ? extends Number> numberOfTransactionsByState()
-            throws Exception;
-
-    public abstract Map<String, ? extends Number> numberOfProductsByProduct()
-            throws Exception;
-
-}
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/etl/CrunchETL.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/etl/CrunchETL.java
deleted file mode 100755
index 271083d..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/etl/CrunchETL.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.etl;
-
-import java.util.Map;
-
-import org.apache.bigtop.bigpetstore.contract.PetStoreStatistics;
-import org.apache.crunch.FilterFn;
-import org.apache.crunch.MapFn;
-import org.apache.crunch.PCollection;
-import org.apache.crunch.PTable;
-import org.apache.crunch.Pair;
-import org.apache.crunch.Pipeline;
-import org.apache.crunch.impl.mem.MemPipeline;
-import org.apache.crunch.impl.mr.MRPipeline;
-import org.apache.crunch.io.From;
-import org.apache.crunch.types.avro.Avros;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-
-public class CrunchETL extends PetStoreStatistics {
-
-    public static MapFn<LineItem, String> COUNT_BY_PRODUCT = new MapFn<LineItem, String>() {
-        public String map(LineItem lineItem) {
-            try {
-                return lineItem.getDescription();
-            } catch (Throwable t) {
-                throw new RuntimeException(t);
-            }
-        }
-    };
-    public static MapFn<LineItem, String> COUNT_BY_STATE = new MapFn<LineItem, String>() {
-        public String map(LineItem lineItem) {
-            try {
-                return lineItem.getDescription();
-            } catch (Throwable t) {
-                throw new RuntimeException(t);
-            }
-        }
-    };
-
-    PCollection<LineItem> lineItems;
-
-    public CrunchETL(Path input, Path output) throws Exception {
-        Pipeline pipeline = MemPipeline.getInstance();
-        PCollection<String> lines = pipeline.read(From.textFile(new Path(input,
-                "part-r-00000")));
-        System.out.println("crunch : " + lines.getName() + "  "
-                + lines.getSize());
-        lineItems = lines.parallelDo(ETL, Avros.reflects(LineItem.class));
-
-    }
-
-    public static MapFn ETL = new MapFn<String, LineItem>() {
-        @Override
-        public LineItem map(String input) {
-            String[] fields = input.split(",");
-            LineItem li = new LineItem();
-            li.setAppName(fields[1]);
-            li.setFirstName(fields[3]);
-            // ...
-            li.setDescription(fields[fields.length - 1]);
-            return li;
-        }
-    };
-
-    @Override
-    public Map<String, ? extends Number> numberOfTransactionsByState()
-            throws Exception {
-        PTable<String, Long> counts = lineItems.parallelDo(COUNT_BY_STATE,
-                Avros.strings()).count();
-        Map m = counts.materializeToMap();
-
-        System.out.println("Crunch:::  " + m);
-        return m;
-    }
-
-    @Override
-    public Map<String, ? extends Number> numberOfProductsByProduct()
-            throws Exception {
-        PTable<String, Long> counts = lineItems.parallelDo(COUNT_BY_PRODUCT,
-                Avros.strings()).count();
-        Map m = counts.materializeToMap();
-        //CrunchETL. System.out.println("Crunch:::  " + m);
-        return m;
-    }
-
-    public static void main(String... args) throws Exception {
-        /**
-         * PCollection<String> lines = MemPipeline .collectionOf(
-         *  "BigPetStore,storeCode_AK,1  lindsay,franco,Sat Jan 10 00:11:10 EST 1970,10.5,dog-food"
-         *  "BigPetStore,storeCode_AZ,1  tom,giles,Sun Dec 28 23:08:45 EST 1969,10.5,dog-food"
-         *  "BigPetStore,storeCode_CA,1  brandon,ewing,Mon Dec 08 20:23:57 EST 1969,16.5,organic-dog-food"
-         *  "BigPetStore,storeCode_CA,2  angie,coleman,Thu Dec 11 07:00:31 EST 1969,10.5,dog-food"
-         *  "BigPetStore,storeCode_CA,3  angie,coleman,Tue Jan 20 06:24:23 EST 1970,7.5,cat-food"
-         *  "BigPetStore,storeCode_CO,1  sharon,trevino,Mon Jan 12 07:52:10 EST 1970,30.1,antelope snacks"
-         *  "BigPetStore,storeCode_CT,1  kevin,fitzpatrick,Wed Dec 10 05:24:13 EST 1969,10.5,dog-food"
-         *  "BigPetStore,storeCode_NY,1  dale,holden,Mon Jan 12 23:02:13 EST 1970,19.75,fish-food"
-         *  "BigPetStore,storeCode_NY,2  dale,holden,Tue Dec 30 12:29:52 EST 1969,10.5,dog-food"
-         *  "BigPetStore,storeCode_OK,1  donnie,tucker,Sun Jan 18 04:50:26 EST 1970,7.5,cat-food"
-         * );
-         **/
-        // FAILS
-        Pipeline pipeline = new MRPipeline(CrunchETL.class);
-
-        PCollection<String> lines = pipeline.read(From.textFile(new Path(
-                "/tmp/BigPetStore1388719888255/generated/part-r-00000")));
-
-
-        PCollection<LineItem> lineItems = lines.parallelDo(
-                new MapFn<String, LineItem>() {
-                    @Override
-                    public LineItem map(String input) {
-
-                        System.out.println("proc1 " + input);
-                        String[] fields = input.split(",");
-                        LineItem li = new LineItem();
-                        li.setAppName("" + fields[1]);
-                        li.setFirstName("" + fields[3]);
-                        li.setDescription("" + fields[fields.length - 1]);
-                        return li;
-                    }
-                }, Avros.reflects(LineItem.class));
-
-        for (LineItem i : lineItems.materialize())
-            System.out.println(i);
-    }
-}
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/etl/LineItem.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/etl/LineItem.java
deleted file mode 100755
index a415cf4..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/etl/LineItem.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.etl;
-
-import java.io.Serializable;
-
-public class LineItem implements Serializable{
-
-    public LineItem(String appName, String storeCode, Integer lineId, String firstName, String lastName, String timestamp, Double price, String description){
-        super();
-        this.appName=appName;
-        this.storeCode=storeCode;
-        this.lineId=lineId;
-        this.firstName=firstName;
-        this.lastName=lastName;
-        this.timestamp=timestamp;
-        this.price=price;
-        this.description=description;
-    }
-
-    String appName;
-    String storeCode;
-    Integer lineId;
-    String firstName;
-    String lastName;
-    String timestamp;
-    Double price;
-    String description;
-
-    public LineItem(){
-        super();
-    }
-
-    public String getAppName(){
-        return appName;
-    }
-
-    public void setAppName(String appName){
-        this.appName=appName;
-    }
-
-    public String getStoreCode(){
-        return storeCode;
-    }
-
-    public void setStoreCode(String storeCode){
-        this.storeCode=storeCode;
-    }
-
-    public int getLineId(){
-        return lineId;
-    }
-
-    public void setLineId(int lineId){
-        this.lineId=lineId;
-    }
-
-    public String getFirstName(){
-        return firstName;
-    }
-
-    public void setFirstName(String firstName){
-        this.firstName=firstName;
-    }
-
-    public String getLastName(){
-        return lastName;
-    }
-
-    public void setLastName(String lastName){
-        this.lastName=lastName;
-    }
-
-    public String getTimestamp(){
-        return timestamp;
-    }
-
-    public void setTimestamp(String timestamp){
-        this.timestamp=timestamp;
-    }
-
-    public double getPrice(){
-        return price;
-    }
-
-    public void setPrice(double price){
-        this.price=price;
-    }
-
-    public String getDescription(){
-        return description;
-    }
-
-    public void setDescription(String description){
-        this.description=description;
-    }
-
-    // other constructors, parsers, etc.
-}
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/etl/PigCSVCleaner.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/etl/PigCSVCleaner.java
deleted file mode 100644
index 0ca7444..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/etl/PigCSVCleaner.java
+++ /dev/null
@@ -1,156 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.etl;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.bigtop.bigpetstore.util.BigPetStoreConstants.OUTPUTS;
-import org.apache.bigtop.bigpetstore.util.DeveloperTools;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.pig.ExecType;
-import org.apache.pig.PigServer;
-
-/**
- * This class operates by ETL'ing the data-set into pig.
- * The pigServer is persisted through the life of the class, so that the
- * intermediate data sets created in the constructor can be reused.
- */
-public class PigCSVCleaner  {
-
-    PigServer pigServer;
-
-    private static Path getCleanedTsvPath(Path outputPath) {
-      return new Path(outputPath, OUTPUTS.tsv.name());
-    }
-
-    public PigCSVCleaner(Path inputPath, Path outputPath, ExecType ex, File... scripts)
-            throws Exception {
-        FileSystem fs = FileSystem.get(inputPath.toUri(), new Configuration());
-
-        if(! fs.exists(inputPath)){
-            throw new RuntimeException("INPUT path DOES NOT exist : " + inputPath);
-        }
-
-        if(fs.exists(outputPath)){
-            throw new RuntimeException("OUTPUT already exists : " + outputPath);
-        }
-        // run pig in local mode
-        pigServer = new PigServer(ex);
-
-        /**
-         * First, split the tabs up.
-         *
-         * BigPetStore,storeCode_OK,2 1,yang,jay,3,flea collar,69.56,Mon Dec 15 23:33:49 EST 1969
-         *
-         * ("BigPetStore,storeCode_OK,2", "1,yang,jay,3,flea collar,69.56,Mon Dec 15 23:33:49 EST 1969")
-         */
-        pigServer.registerQuery("csvdata = LOAD '<i>' AS (ID,DETAILS);".replaceAll("<i>", inputPath.toString()));
-
-        // currentCustomerId, firstName, lastName, product.id, product.name.toLowerCase, product.price, date
-        /**
-         * Now, we want to split the two tab delimited fields into uniform
-         * fields of comma separated values. To do this, we 1) Internally split
-         * the FIRST and SECOND fields by commas "a,b,c" --> (a,b,c) 2) FLATTEN
-         * the FIRST and SECOND fields. (d,e) (a,b,c) -> d e a b c
-         */
-        pigServer.registerQuery(
-              "id_details = FOREACH csvdata GENERATE "
-              + "FLATTEN(STRSPLIT(ID, ',', 3)) AS " +
-			"(drop, code, transaction) ,"
-
-              + "FLATTEN(STRSPLIT(DETAILS, ',', 7)) AS " +
-                  "(custId, fname, lname, productId, product:chararray, price, date);");
-        pigServer.registerQuery("mahout_records = FOREACH id_details GENERATE custId, productId, 1;");
-        pigServer.store("id_details", getCleanedTsvPath(outputPath).toString());
-        pigServer.store("mahout_records", new Path(outputPath, OUTPUTS.MahoutPaths.Mahout.name()).toString());
-        /**
-         * Now we run scripts... this is where you can add some
-         * arbitrary analytics.
-         *
-         * We add "input" and "output" parameters so that each
-         * script can read them and use them if they want.
-         *
-         * Otherwise, just hardcode your inputs into your pig scripts.
-         */
-        int i = 0;
-        for(File script : scripts) {
-            Map<String,String> parameters = new HashMap<>();
-            parameters.put("input", getCleanedTsvPath(outputPath).toString());
-
-            Path dir = outputPath.getParent();
-            Path adHocOut = new Path(dir, OUTPUTS.pig_ad_hoc_script.name() + (i++));
-            System.out.println("Setting default output to " + adHocOut);
-            parameters.put("output", adHocOut.toString());
-            pigServer.registerScript(script.getAbsolutePath(), parameters);
-        }
-    }
-
-    private static File[] files(String[] args,int startIndex) {
-        List<File> files = new ArrayList<File>();
-        for(int i = startIndex ; i < args.length ; i++) {
-            File f = new File(args[i]);
-            if(! f.exists()) {
-                throw new RuntimeException("Pig script arg " + i + " " + f.getAbsolutePath() + " not found. ");
-            }
-            files.add(f);
-        }
-        System.out.println(
-                "Ad-hoc analytics:"+
-                "Added  " + files.size() + " pig scripts to post process.  "+
-                "Each one will be given $input and $output arguments.");
-        return files.toArray(new File[]{});
-    }
-
-    public static void main(final String[] args) throws Exception {
-        System.out.println("Starting pig etl " + args.length);
-        Configuration c = new Configuration();
-        int res = ToolRunner.run(c, new Tool() {
-                    Configuration conf;
-                    @Override
-                    public void setConf(Configuration conf) {
-                        this.conf=conf;
-                    }
-
-                    @Override
-                    public Configuration getConf() {
-                        return this.conf;
-                    }
-
-                    @Override
-                    public int run(String[] args) throws Exception {
-                        DeveloperTools.validate(
-                                args,
-                                "generated data directory",
-                                "pig output directory");
-                        new PigCSVCleaner(
-                                new Path(args[0]),
-                                new Path(args[1]),
-                                ExecType.MAPREDUCE,
-                                files(args,2));
-                        return 0;
-                    }
-                }, args);
-        System.exit(res);
-      }
-}
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/BPSGenerator.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/BPSGenerator.java
deleted file mode 100755
index 6c8beef..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/BPSGenerator.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.bigtop.bigpetstore.generator;
-
-import java.io.IOException;
-import java.util.Date;
-
-import org.apache.bigtop.bigpetstore.util.BigPetStoreConstants;
-import org.apache.bigtop.bigpetstore.util.DeveloperTools;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.lib.MultipleOutputs;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Mapper.Context;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import static org.apache.bigtop.bigpetstore.generator.PetStoreTransactionsInputFormat.props;
-
-/**
- * This is a mapreduce implementation of a generator of a large sentiment
- * analysis data set. The scenario is as follows:
- *
- * The number of records will (roughly) correspond to the output size - each
- * record is about 80 bytes.
- *
- * 1KB set bigpetstore_records=10 1MB set bigpetstore_records=10,000 1GB set
- * bigpetstore_records=10,000,000 1TB set bigpetstore_records=10,000,000,000
- */
-public class BPSGenerator {
-
-  public static final int DEFAULT_NUM_RECORDS = 100;
-
-  final static Logger log = LoggerFactory.getLogger(BPSGenerator.class);
-
-  public enum props {
-    bigpetstore_records
-  }
-
-  public static Job createJob(Path output, int records) throws IOException {
-    Configuration c = new Configuration();
-    c.setInt(props.bigpetstore_records.name(), DEFAULT_NUM_RECORDS);
-    return getCreateTransactionRecordsJob(output, c);
-  }
-
-  public static Job getCreateTransactionRecordsJob(Path outputDir, Configuration conf)
-          throws IOException {
-    Job job = new Job(conf, "PetStoreTransaction_ETL_" + System.currentTimeMillis());
-    // recursively delete the data set if it exists.
-    FileSystem.get(outputDir.toUri(), conf).delete(outputDir, true);
-    job.setJarByClass(BPSGenerator.class);
-    job.setMapperClass(MyMapper.class);
-    // use the default reducer
-    // job.setReducerClass(PetStoreTransactionGeneratorJob.Red.class);
-    job.setOutputKeyClass(Text.class);
-    job.setOutputValueClass(Text.class);
-    job.setMapOutputKeyClass(Text.class);
-    job.setMapOutputValueClass(Text.class);
-    job.setInputFormatClass(PetStoreTransactionsInputFormat.class);
-    job.setOutputFormatClass(TextOutputFormat.class);
-    FileOutputFormat.setOutputPath(job, outputDir);
-    return job;
-  }
-
-  public static class MyMapper extends Mapper<Text, Text, Text, Text> {
-    @Override
-    protected void setup(Context context) throws IOException,
-    InterruptedException {
-      super.setup(context);
-    }
-
-    protected void map(Text key, Text value, Context context)
-            throws java.io.IOException, InterruptedException {
-      context.write(key, value);
-    }
-  }
-
-  public static void main(String args[]) throws Exception {
-    if (args.length != 2) {
-      System.err.println("USAGE : [number of records] [output path]");
-      System.exit(0);
-    } else {
-      Configuration conf = new Configuration();
-      DeveloperTools.validate(args, "# of records", "output path");
-      conf.setInt(PetStoreTransactionsInputFormat.props.bigpetstore_records.name(),
-              Integer.parseInt(args[0]));
-      getCreateTransactionRecordsJob(new Path(args[1]), conf).waitForCompletion(true);
-    }
-  }
-}
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/CustomerGenerator.scala b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/CustomerGenerator.scala
deleted file mode 100644
index 0223c8d..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/CustomerGenerator.scala
+++ /dev/null
@@ -1,97 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.generator
-
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.FileSystem
-import org.apache.bigtop.bigpetstore.generator.util.State
-import org.apache.hadoop.fs.Path
-import parquet.org.codehaus.jackson.format.DataFormatDetector
-import org.slf4j.LoggerFactory
-import java.util.{Collection => JavaCollection}
-import scala.collection.JavaConversions.asJavaCollection
-import java.util.Random
-import scala.collection.mutable.{HashMap, Set, MultiMap}
-import scala.collection.immutable.NumericRange
-
-/**
- * This class generates random customer data. The generated customer
- * ids will be consecutive. The client code that generates the transactions
- * records needs to know the available customer ids. If we keep the customer
- * ids consecutive here. we don't have to store those ids in memory, or perform
- * costly lookups. Once we introduce something that allows efficient lookup
- * of data, we can do something else as well.
- *
- * The generated customer ids will start from 1. So, if we have 100 customers,
- * the ids will be [1, 100].
- */
-class CustomerGenerator(val desiredCustomerCount: Int, val outputPath: Path) {
-  private val logger = LoggerFactory.getLogger(getClass)
-  private val random = new Random;
-  private val assertion = "The generateCustomerRecords() hasn't been called yet";
-  private var customerFileGenerated = false
-  private val _stateToCustomerIds = new HashMap[State, NumericRange[Long]]
-
-  def isCustomerFileGenrated = customerFileGenerated
-
-  def customerIds(state: State) = {
-    assert(customerFileGenerated, assertion)
-    _stateToCustomerIds(state)
-  }
-
-  def generateCustomerRecords() = {
-    val config = new Configuration
-    val fs = FileSystem.getLocal(config)
-
-    assert(!fs.exists(outputPath))
-
-    val outputStream = fs.create(outputPath)
-
-    var currentId: Long = 1
-    logger.info("Generating customer records at: {}", fs.pathToFile(outputPath))
-    for (state <- State.values();
-            stateCustomerCount = (state.probability * desiredCustomerCount) toLong;
-            random = new Random(state.hashCode);
-            i <- 1L to stateCustomerCount) {
-      val customerRecord = CustomerGenerator.createRecord(currentId, state, random);
-      logger.info("generated customer: {}", customerRecord)
-      outputStream.writeBytes(customerRecord)
-
-      if(i == 1) {
-        val stateCustomerIdRange = currentId until (currentId + stateCustomerCount);
-        _stateToCustomerIds += (state -> stateCustomerIdRange)
-      }
-      currentId += 1
-    }
-
-    println(_stateToCustomerIds)
-    outputStream.flush
-    outputStream.close
-    customerFileGenerated = true
-  }
-}
-
-object CustomerGenerator {
-  val OUTPUT_FILE_NAME = "customers"
-
-  private def createRecord(id: Long, state: State, r: Random) = {
-    val firstName = DataForger.firstName
-    val lastName = DataForger.lastName
-    s"$id\t${DataForger.firstName(r)}\t${DataForger.lastName(r)}\t${state.name}\n"
-  }
-}
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/PetStoreTransaction.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/PetStoreTransaction.java
deleted file mode 100755
index 27a3407..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/PetStoreTransaction.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.generator;
-
-import java.util.Date;
-
-public interface PetStoreTransaction {
-
-    public String getFirstName();
-
-    public String getLastName();
-
-    public String getProduct();
-
-    public Date getDate();
-
-    public Integer getPrice();
-
-}
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/PetStoreTransactionInputSplit.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/PetStoreTransactionInputSplit.java
deleted file mode 100755
index d350cc8..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/PetStoreTransactionInputSplit.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.generator;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
-import org.apache.bigtop.bigpetstore.generator.util.State;
-import org.apache.commons.lang3.Range;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapreduce.InputSplit;
-
-/**
- * What does an `InputSplit` actually do? From the Javadocs, it looks like ...
- * absolutely nothing.
- *
- * Note: for some reason, you *have* to implement Writable, even if your methods
- * do nothing, or you will got strange and un-debuggable null pointer
- * exceptions.
- */
-public class PetStoreTransactionInputSplit extends InputSplit implements
-        Writable {
-
-    public PetStoreTransactionInputSplit() {
-    }
-
-    public int records;
-    public State state;
-    public Range<Long> customerIdRange;
-
-    public PetStoreTransactionInputSplit(int records, Range<Long> customerIdRange, State state) {
-        this.records = records;
-        this.state = state;
-        this.customerIdRange = customerIdRange;
-    }
-
-    public void readFields(DataInput dataInputStream) throws IOException {
-        records = dataInputStream.readInt();
-        state = State.valueOf(dataInputStream.readUTF());
-        customerIdRange = Range.between(dataInputStream.readLong(), dataInputStream.readLong());
-    }
-
-    public void write(DataOutput dataOutputStream) throws IOException {
-        dataOutputStream.writeInt(records);
-        dataOutputStream.writeUTF(state.name());
-        dataOutputStream.writeLong(customerIdRange.getMinimum());
-        dataOutputStream.writeLong(customerIdRange.getMaximum());
-    }
-
-    @Override
-    public String[] getLocations() throws IOException, InterruptedException {
-        return new String[] {};
-    }
-
-    @Override
-    public long getLength() throws IOException, InterruptedException {
-        return records;
-    }
-}
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/PetStoreTransactionsInputFormat.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/PetStoreTransactionsInputFormat.java
deleted file mode 100755
index 4c22e36..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/PetStoreTransactionsInputFormat.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.generator;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-import org.apache.bigtop.bigpetstore.generator.TransactionIteratorFactory.KeyVal;
-import org.apache.bigtop.bigpetstore.generator.util.State;
-import org.apache.commons.lang3.Range;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-
-/**
- * A simple input split that fakes input.
- */
-public class PetStoreTransactionsInputFormat extends
-    FileInputFormat<Text, Text> {
-
-  @Override
-  public RecordReader<Text, Text> createRecordReader(
-          final InputSplit inputSplit, TaskAttemptContext arg1)
-                  throws IOException, InterruptedException {
-    return new RecordReader<Text, Text>() {
-
-      @Override
-      public void close() throws IOException {
-
-      }
-
-      /**
-       * We need the "state" information to generate records. - Each state
-       * has a probability associated with it, so that our data set can be
-       * realistic (i.e. Colorado should have more transactions than rhode
-       * island).
-       *
-       * - Each state also will its name as part of the key.
-       *
-       * - This task would be distributed, for example, into 50 nodes on a
-       * real cluster, each creating the data for a given state.
-       */
-
-      PetStoreTransactionInputSplit bpsInputplit = (PetStoreTransactionInputSplit) inputSplit;
-      int records = bpsInputplit.records;
-      // TODO why not send the whole InputSplit there?
-      Iterator<KeyVal<String, String>> data =
-              (new TransactionIteratorFactory(records, bpsInputplit.customerIdRange, bpsInputplit.state)).data();
-      KeyVal<String, String> currentRecord;
-
-      @Override
-      public Text getCurrentKey() throws IOException,
-      InterruptedException {
-        return new Text(currentRecord.key());
-      }
-
-      @Override
-      public Text getCurrentValue() throws IOException,
-      InterruptedException {
-        return new Text(currentRecord.value());
-      }
-
-      @Override
-      public void initialize(InputSplit arg0, TaskAttemptContext arg1)
-              throws IOException, InterruptedException {
-      }
-
-      @Override
-      public boolean nextKeyValue() throws IOException,
-      InterruptedException {
-        if (data.hasNext()) {
-          currentRecord = data.next();
-          return true;
-        }
-        return false;
-      }
-
-      @Override
-      public float getProgress() throws IOException, InterruptedException {
-        return 0f;
-      }
-
-    };
-  }
-
-  public enum props {
-    bigpetstore_records
-  }
-
-  @Override
-  public List<InputSplit> getSplits(JobContext arg) throws IOException {
-    int numRecordsDesired = arg
-            .getConfiguration()
-            .getInt(PetStoreTransactionsInputFormat.props.bigpetstore_records
-                    .name(), -1);
-    if (numRecordsDesired == -1) {
-      throw new RuntimeException(
-              "# of total records not set in configuration object: "
-                      + arg.getConfiguration());
-    }
-
-    List<InputSplit> list = new ArrayList<InputSplit>();
-    long customerIdStart = 1;
-    for (State s : State.values()) {
-      int numRecords = numRecords(numRecordsDesired, s.probability);
-      // each state is assigned a range of customer-ids from which it can choose.
-      // The number of customers can be as many as the number of transactions.
-      Range<Long> customerIdRange = Range.between(customerIdStart, customerIdStart + numRecords - 1);
-      PetStoreTransactionInputSplit split =
-              new PetStoreTransactionInputSplit(numRecords, customerIdRange, s);
-      System.out.println(s + " _ " + split.records);
-      list.add(split);
-      customerIdStart += numRecords;
-    }
-    return list;
-  }
-
-  private int numRecords(int numRecordsDesired, float probability) {
-    return (int) (Math.ceil(numRecordsDesired * probability));
-  }
-}
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/util/Product.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/util/Product.java
deleted file mode 100644
index 54ae8fe..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/util/Product.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.generator.util;
-
-import java.math.BigDecimal;
-import static org.apache.bigtop.bigpetstore.generator.util.ProductType.*;
-
-public enum Product {
-  DOG_FOOD(DOG, 10.50),
-  ORGANIC_DOG_FOOD(DOG, 16.99),
-  STEEL_LEASH(DOG, 19.99),
-  FUZZY_COLLAR(DOG, 24.90),
-  LEATHER_COLLAR(DOG, 18.90),
-  CHOKE_COLLAR(DOG, 15.50),
-  DOG_HOUSE(DOG, 109.99),
-  CHEWY_BONE(DOG, 20.10),
-  DOG_VEST(DOG, 19.99),
-  DOG_SOAP(DOG, 5.45),
-
-  CAT_FOOD(CAT, 7.50),
-  FEEDER_BOWL(CAT, 10.99),
-  LITTER_BOX(CAT, 24.95),
-  CAT_COLLAR(CAT, 7.95),
-  CAT_BLANKET(CAT, 14.49),
-
-  TURTLE_PELLETS(TURTLE, 4.95),
-  TURTLE_FOOD(TURTLE, 10.90),
-  TURTLE_TUB(TURTLE, 40.45),
-
-  FISH_FOOD(FISH, 12.50),
-  SALMON_BAIT(FISH, 29.95),
-  FISH_BOWL(FISH, 20.99),
-  AIR_PUMP(FISH, 13.95),
-  FILTER(FISH, 34.95),
-
-  DUCK_COLLAR(DUCK, 13.25),
-  DUCK_FOOD(DUCK, 20.25),
-  WADING_POOL(DUCK, 45.90);
-
-  /*
-  ANTELOPE_COLLAR(OTHER, 19.90),
-  ANTELOPE_SNACKS(OTHER, 29.25),
-  RODENT_CAGE(OTHER, 39.95),
-  HAY_BALE(OTHER, 4.95),
-  COW_DUNG(OTHER, 1.95),
-  SEAL_SPRAY(OTHER, 24.50),
-  SNAKE_BITE_OINTMENT(OTHER, 29.90);
-  */
-  private final BigDecimal price;
-  public final ProductType productType;
-  private Product(ProductType productType, double price) {
-    this.price = BigDecimal.valueOf(price);
-    this.productType = productType;
-  }
-
-  public int id() {
-    return this.ordinal();
-  }
-
-  public BigDecimal price() {
-    return this.price;
-  }
-
-
-}
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/util/ProductType.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/util/ProductType.java
deleted file mode 100644
index af9ea7f..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/util/ProductType.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.generator.util;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-public enum ProductType {
-  DOG, CAT, TURTLE, FISH, DUCK;
-
-  private List<Product> products;
-
-  public List<Product> getProducts() {
-    if(products == null) {
-      generateProductList();
-    }
-    return products;
-  }
-
-  private void generateProductList() {
-    List<Product> products = new ArrayList<>();
-    for(Product p : Product.values()) {
-      if(p.productType == this) {
-        products.add(p);
-      }
-    }
-    this.products = Collections.unmodifiableList(products);
-  }
-
-}
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/util/State.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/util/State.java
deleted file mode 100644
index 2c729a7..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/generator/util/State.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.generator.util;
-
-import java.util.Random;
-
-
-/**
- * Each "state" has a pet store , with a certain "proportion" of the
- * transactions.
- */
-public enum State {
-  // Each state is associated with a relative probability.
-  AZ(.1f),
-  AK(.1f),
-  CT(.1f),
-  OK(.1f),
-  CO(.1f),
-  CA(.3f),
-  NY(.2f);
-
-  public static Random rand = new Random();
-  public float probability;
-
-  private State(float probability) {
-    this.probability = probability;
-  }
-}
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/recommend/ItemRecommender.scala b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/recommend/ItemRecommender.scala
deleted file mode 100644
index 10acd5a..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/recommend/ItemRecommender.scala
+++ /dev/null
@@ -1,121 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.bigtop.bigpetstore.recommend
-
-import org.apache.mahout.cf.taste.hadoop.als.RecommenderJob
-import org.apache.mahout.cf.taste.hadoop.als.ParallelALSFactorizationJob
-import java.io.File
-import parquet.org.codehaus.jackson.map.DeserializerFactory.Config
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.conf.Configurable
-import org.apache.hadoop.util.ToolRunner
-import org.apache.mahout.cf.taste.hadoop.als.SharingMapper
-import org.apache.hadoop.util.Tool
-import org.apache.bigtop.bigpetstore.util.DeveloperTools
-
-// We don't need to wrap these two jobs in ToolRunner.run calls since the only
-// thing that we are doing right now is calling the run() methods of RecommenderJob
-// and ParallelALSFactorizationJob. Both of these classes have a main() method that
-// internally calls ToolRunner.run with all the command line args passed. So, if
-// we want to run this code from the command line, we can easily do so by running
-// the main methods of the ParallelALSFactorizationJob, followed by running the
-// main method of RecommenderJob. That would also take care of the multiple-jvm
-// instance issue metioned in the comments below, so the call to
-class ItemRecommender(private val inputDir: String,
-        private val factorizationOutputDir: String,
-        private val recommendationsOutputDir: String) {
-  private val recommenderJob = new RecommenderJob
-  private val factorizationJob = new ParallelALSFactorizationJob
-
-  private def tempDir = "/tmp/mahout_" + System.currentTimeMillis
-
-  private def performAlsFactorization() = {
-    ToolRunner.run(factorizationJob, Array(
-        "--input", inputDir,
-        "--output", factorizationOutputDir,
-        "--lambda", "0.1",
-        "--tempDir", tempDir,
-        "--implicitFeedback", "false",
-        "--alpha", "0.8",
-        "--numFeatures", "2",
-        "--numIterations", "5",
-        "--numThreadsPerSolver", "1"))
-  }
-
-  private def generateRecommendations() = {
-    ToolRunner.run(recommenderJob, (Array(
-        "--input", factorizationOutputDir + "/userRatings/",
-        "--userFeatures", factorizationOutputDir + "/U/",
-        "--itemFeatures", factorizationOutputDir + "/M/",
-        "--numRecommendations", "1",
-        "--output", recommendationsOutputDir,
-        "--maxRating", "1")))
-  }
-
-  // At this point, the performAlsFactorization generateRecommendations
-  // and this method can not be run from the same VM instance. These two jobs
-  // share a common static variable which is not being handled correctly.
-  // This, unfortunately, results in a class-cast exception being thrown. That's
-  // why the resetFlagInSharedAlsMapper is required. See the comments on
-  // resetFlagInSharedAlsMapper() method.
-  def recommend = {
-    performAlsFactorization
-    resetFlagInSharedAlsMapper
-    generateRecommendations
-  }
-
-  // necessary for local execution in the same JVM only. If the performAlsFactorization()
-  // and generateRecommendations() calls are performed in separate JVM instances, this
-  // would be taken care of automatically. However, if we want to run this two methods
-  // as one task, we need to clean up the static state set by these methods, and we don't
-  // have any legitimate way of doing this directly. This clean-up should have been
-  // performed by ParallelALSFactorizationJob class after the job is finished.
-  // TODO: remove this when a better way comes along, or ParallelALSFactorizationJob
-  // takes responsibility.
-  private def resetFlagInSharedAlsMapper {
-    val m = classOf[SharingMapper[_, _, _, _, _]].getDeclaredMethod("reset");
-    m setAccessible true
-    m.invoke(null)
-  }
-}
-
-object ItemRecommender {
-  def main(args: Array[String]) {
-      val res = ToolRunner.run(new Configuration(), new Tool() {
-      var conf: Configuration = _;
-
-      override def setConf(conf: Configuration) {
-        this.conf=conf;
-      }
-
-
-      override def getConf() = {
-        this.conf;
-      }
-
-
-      override def run(toolArgs: Array[String]) = {
-        val ir = new ItemRecommender(toolArgs(0), toolArgs(1), toolArgs(2))
-        ir.recommend
-        0;
-      }
-    }, args);
-    System.exit(res);
-  }
-}
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/util/BigPetStoreConstants.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/util/BigPetStoreConstants.java
deleted file mode 100755
index 01a6b95..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/util/BigPetStoreConstants.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Static final constants
- *
- * is useful to have the basic sql here as the HIVE SQL can vary between hive
- * versions if updated here will update everywhere
- */
-
-package org.apache.bigtop.bigpetstore.util;
-
-public class BigPetStoreConstants {
-
-   //Files should be stored in graphviz arch.dot
-   public static enum OUTPUTS {
-        generated,//generator
-        cleaned,//pig
-        tsv,
-        pig_ad_hoc_script,
-        CUSTOMER_PAGE; //crunchhh
-
-        public static enum MahoutPaths {
-          Mahout,
-          AlsFactorization,
-          AlsRecommendations
-        }
-    };
-
-}
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/util/DeveloperTools.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/util/DeveloperTools.java
deleted file mode 100755
index 06671b9..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/util/DeveloperTools.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.util;
-
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.mapreduce.Job;
-
-/**
- * Dev utilities for testing arguments etc...
- */
-public class DeveloperTools {
-
-    /**
-     * Validates that the expected args are present in the "args" array.
-     * Just some syntactic sugar for good arg error handling.
-     * @param args
-     * @param expected arguments.
-     */
-    public static void validate(String[] args, String... expected) {
-        int i=-1;
-        try{
-            for(i = 0 ; i < expected.length ; i++) {
-                System.out.println("VALUE OF " + expected[i] + " = " + args[i]);
-            }
-        }
-        catch(Throwable t) {
-            System.out.println("Argument " + i + " not available.");
-            System.out.println("We expected " + expected.length + " arguments for this phase");
-        }
-
-
-    }
-    public static void main(String[] args) throws Exception {
-        Log LOG = LogFactory.getLog(Job.class);
-    }
-
-}
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/util/NumericalIdUtils.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/util/NumericalIdUtils.java
deleted file mode 100644
index c652beb..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/util/NumericalIdUtils.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.bigtop.bigpetstore.util;
-
-import org.apache.bigtop.bigpetstore.generator.util.State;
-
-/**
- * User and Product IDs need numerical
- * identifiers for recommender algorithms
- * which attempt to interpolate new
- * products.
- *
- * TODO: Delete this class. Its not necessarily required: We might just use HIVE HASH() as our
- * standard for this.
- */
-public class NumericalIdUtils {
-
-    /**
-     * People: Leading with ordinal code for state.
-     */
-    public static long toId(State state, String name){
-        String fromRawData =
-                state==null?
-                        name:
-                         (state.name()+"_"+name);
-        return fromRawData.hashCode();
-    }
-    /**
-     * People: Leading with ordinal code for state.
-     */
-    public static long toId(String name){
-        return toId(null,name);
-    }
-}
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/util/PetStoreParseFunctions.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/util/PetStoreParseFunctions.java
deleted file mode 100755
index 056dfc3..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/util/PetStoreParseFunctions.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.bigtop.bigpetstore.util;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * TODO: This might be dead code.
- */
-public class PetStoreParseFunctions {
-
-    String[] headers = { "code", "city", "country", "lat", "lon" };
-
-    public Map<String, Object> parse(String line) {
-
-        Map<String, Object> resultMap = new HashMap<String, Object>();
-
-        List<String> csvObj = null;
-
-        String[] temp = line.split(",");
-        csvObj = new ArrayList<String>(Arrays.asList(temp));
-
-        if (csvObj.isEmpty()) {
-            return resultMap;
-        }
-
-        int k = 0;
-
-        for (String valueStr : csvObj) {
-
-            resultMap.put(headers[k++], valueStr);
-
-        }
-
-        return resultMap;
-    }
-}
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/util/StringUtils.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/util/StringUtils.java
deleted file mode 100644
index e4e012e..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/java/org/apache/bigtop/bigpetstore/util/StringUtils.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.util;
-
-import java.util.ArrayList;
-
-/**
-********************************************************************
-* Borrowed from apache-commons-lang StringUtils, overtime we might
-* add more elements here .
-* To maintain minor dependencies on a cluster sometimes this is easier
-* jar's manually in the hadoop classpath or via DistributedCache.
-********************************************************************/
-
-public class StringUtils {
-
-     public static String substringBefore(String str, String separator) {
-         int pos = str.indexOf(separator);
-         if (pos == -1) {
-             return str;
-         }
-         return str.substring(0, pos);
-     }
-
-
-     public static String substringAfter(String str, String separator) {
-         if (str.length()==0) {
-             return str;
-         }
-         if (separator == null) {
-             return "";
-         }
-         int pos = str.indexOf(separator);
-         if (pos == -1) {
-             return "";
-         }
-         return str.substring(pos + separator.length());
-     }
- }
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/scala/org/apache/bigtop/bigpetstore/generator/DataForger.scala b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/scala/org/apache/bigtop/bigpetstore/generator/DataForger.scala
deleted file mode 100644
index c5e6513..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/scala/org/apache/bigtop/bigpetstore/generator/DataForger.scala
+++ /dev/null
@@ -1,280 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.generator
-
-import java.util.Random
-import org.jfairy.Fairy
-import java.util.Date
-
-
-/**
- * Generic class for generating random data. This class was created so
- * that we can provide a uniform API for getting random data. If we want,
- * we can replace the underlying data-generation implementation using
- * existing libraries.
- */
-object DataForger {
-  private val random = new Random
-  private val fairy = Fairy.create()
-
-  // TODO: Jay / Bhashit : refactor to use a random data generator?
-  def firstName(random: Random) = firstNames(random.nextInt(firstNames.length))
-  def firstName: String = firstName(random)
-
-  // TODO: Jay / Bhashit : refactor to use a random data generator?
-  def lastName(random: Random) = lastNames(random.nextInt(lastNames.length))
-  def lastName: String = lastName(random)
-
-  def randomDateInPastYears(maxYearsEarlier: Int) = fairy.dateProducer().randomDateInThePast(maxYearsEarlier).toDate()
-
-  private val firstNames =  IndexedSeq("Aaron", "Abby", "Abigail", "Adam",
-          "Alan", "Albert", "Alex", "Alexandra", "Alexis", "Alice", "Alicia",
-          "Alisha", "Alissa", "Allen", "Allison", "Alyssa", "Amanda", "Amber",
-          "Amy", "Andrea", "Andrew", "Andy", "Angel", "Angela", "Angie",
-          "Anita", "Ann", "Anna", "Annette", "Anthony", "Antonio", "April",
-          "Arthur", "Ashley", "Audrey", "Austin", "Autumn", "Baby", "Barb",
-          "Barbara", "Becky", "Benjamin", "Beth", "Bethany", "Betty",
-          "Beverly", "Bill", "Billie", "Billy", "Blake", "Bob", "Bobbie",
-          "Bobby", "Bonnie", "Brad", "Bradley", "Brady", "Brandi", "Brandon",
-          "Brandy", "Breanna", "Brenda", "Brent", "Brett", "Brian", "Brianna",
-          "Brittany", "Brooke", "Brooklyn", "Bruce", "Bryan", "Caleb",
-          "Cameron", "Candy", "Carl", "Carla", "Carmen", "Carol", "Carolyn",
-          "Carrie", "Casey", "Cassandra", "Catherine", "Cathy", "Chad",
-          "Charlene", "Charles", "Charlie", "Charlotte", "Chase", "Chasity",
-          "Chastity", "Chelsea", "Cheryl", "Chester", "Cheyenne", "Chris",
-          "Christian", "Christina", "Christine", "Christoph", "Christopher",
-          "Christy", "Chuck", "Cindy", "Clara", "Clarence", "Clayton",
-          "Clifford", "Clint", "Cody", "Colton", "Connie", "Corey", "Cory",
-          "Courtney", "Craig", "Crystal", "Curtis", "Cynthia", "Dakota",
-          "Dale", "Dallas", "Dalton", "Dan", "Dana", "Daniel", "Danielle",
-          "Danny", "Darla", "Darlene", "Darrell", "Darren", "Dave", "David",
-          "Dawn", "Dean", "Deanna", "Debbie", "Deborah", "Debra", "Denise",
-          "Dennis", "Derek", "Derrick", "Destiny", "Devin", "Diana", "Diane",
-          "Dillon", "Dixie", "Dominic", "Don", "Donald", "Donna", "Donnie",
-          "Doris", "Dorothy", "Doug", "Douglas", "Drew", "Duane", "Dustin",
-          "Dusty", "Dylan", "Earl", "Ed", "Eddie", "Edward", "Elaine",
-          "Elizabeth", "Ellen", "Emily", "Eric", "Erica", "Erika", "Erin",
-          "Ernest", "Ethan", "Eugene", "Eva", "Evelyn", "Everett", "Faith",
-          "Father", "Felicia", "Floyd", "Francis", "Frank", "Fred", "Gabriel",
-          "Gage", "Gail", "Gary", "Gene", "George", "Gerald", "Gina", "Ginger",
-          "Glen", "Glenn", "Gloria", "Grace", "Greg", "Gregory", "Haley",
-          "Hannah", "Harley", "Harold", "Harry", "Heath", "Heather", "Heidi",
-          "Helen", "Herbert", "Holly", "Hope", "Howard", "Hunter", "Ian",
-          "Isaac", "Jack", "Jackie", "Jacob", "Jade", "Jake", "James", "Jamie",
-          "Jan", "Jane", "Janet", "Janice", "Jared", "Jasmine", "Jason", "Jay",
-          "Jean", "Jeannie", "Jeff", "Jeffery", "Jeffrey", "Jenna", "Jennifer",
-          "Jenny", "Jeremiah", "Jeremy", "Jerry", "Jesse", "Jessica", "Jessie",
-          "Jill", "Jim", "Jimmy", "Joann", "Joanne", "Jodi", "Jody", "Joe",
-          "Joel", "Joey", "John", "Johnathan", "Johnny", "Jon", "Jonathan",
-          "Jonathon", "Jordan", "Joseph", "Josh", "Joshua", "Joyce", "Juanita",
-          "Judy", "Julia", "Julie", "Justin", "Kaitlyn", "Karen", "Katelyn",
-          "Katherine", "Kathleen", "Kathryn", "Kathy", "Katie", "Katrina",
-          "Kay", "Kayla", "Kaylee", "Keith", "Kelly", "Kelsey", "Ken",
-          "Kendra", "Kenneth", "Kenny", "Kevin", "Kim", "Kimberly", "Kris",
-          "Krista", "Kristen", "Kristin", "Kristina", "Kristy", "Kyle",
-          "Kylie", "Lacey", "Laken", "Lance", "Larry", "Laura", "Lawrence",
-          "Leah", "Lee", "Leonard", "Leroy", "Leslie", "Levi", "Lewis",
-          "Linda", "Lindsay", "Lindsey", "Lisa", "Lloyd", "Logan", "Lois",
-          "Loretta", "Lori", "Louis", "Lynn", "Madison", "Mandy", "Marcus",
-          "Margaret", "Maria", "Mariah", "Marie", "Marilyn", "Marion", "Mark",
-          "Marlene", "Marsha", "Martha", "Martin", "Marty", "Marvin", "Mary",
-          "Mary ann", "Mason", "Matt", "Matthew", "Max", "Megan", "Melanie",
-          "Melinda", "Melissa", "Melody", "Michael", "Michelle", "Mickey",
-          "Mike", "Mindy", "Miranda", "Misty", "Mitchell", "Molly", "Monica",
-          "Morgan", "Mother", "Myron", "Nancy", "Natasha", "Nathan",
-          "Nicholas", "Nick", "Nicole", "Nina", "Noah", "Norma", "Norman",
-          "Olivia", "Paige", "Pam", "Pamela", "Pat", "Patricia", "Patrick",
-          "Patty", "Paul", "Paula", "Peggy", "Penny", "Pete", "Phillip",
-          "Phyllis", "Rachael", "Rachel", "Ralph", "Randall", "Randi", "Randy",
-          "Ray", "Raymond", "Rebecca", "Regina", "Renee", "Rex", "Rhonda",
-          "Richard", "Rick", "Ricky", "Rita", "Rob", "Robbie", "Robert",
-          "Roberta", "Robin", "Rochelle", "Rocky", "Rod", "Rodney", "Roger",
-          "Ron", "Ronald", "Ronda", "Ronnie", "Rose", "Roxanne", "Roy", "Russ",
-          "Russell", "Rusty", "Ruth", "Ryan", "Sabrina", "Sally", "Sam",
-          "Samantha", "Samuel", "Sandra", "Sandy", "Sara", "Sarah", "Savannah",
-          "Scott", "Sean", "Seth", "Shanda", "Shane", "Shanna", "Shannon",
-          "Sharon", "Shaun", "Shawn", "Shawna", "Sheila", "Shelly", "Sher",
-          "Sherri", "Sherry", "Shirley", "Sierra", "Skyler", "Stacey", "Stacy",
-          "Stanley", "Stephanie", "Stephen", "Steve", "Steven", "Sue",
-          "Summer", "Susan", "Sydney", "Tabatha", "Tabitha", "Tamara", "Tammy",
-          "Tara", "Tasha", "Tashia", "Taylor", "Ted", "Teresa", "Terri",
-          "Terry", "Tessa", "Thelma", "Theresa", "Thomas", "Tia", "Tiffany",
-          "Tim", "Timmy", "Timothy", "Tina", "Todd", "Tom", "Tommy", "Toni",
-          "Tony", "Tonya", "Tracey", "Tracie", "Tracy", "Travis", "Trent",
-          "Trevor", "Trey", "Trisha", "Tristan", "Troy", "Tyler", "Tyrone",
-          "Unborn", "Valerie", "Vanessa", "Vernon", "Veronica", "Vicki",
-          "Vickie", "Vicky", "Victor", "Victoria", "Vincent", "Virginia",
-          "Vivian", "Walter", "Wanda", "Wayne", "Wendy", "Wesley", "Whitney",
-          "William", "Willie", "Wyatt", "Zachary")
-
-  private val lastNames = IndexedSeq("Abbott", "Acevedo", "Acosta", "Adams",
-          "Adkins", "Aguilar", "Aguirre", "Albert", "Alexander", "Alford",
-          "Allen", "Allison", "Alston", "Alvarado", "Alvarez", "Anderson",
-          "Andrews", "Anthony", "Armstrong", "Arnold", "Ashley", "Atkins",
-          "Atkinson", "Austin", "Avery", "Avila", "Ayala", "Ayers", "Bailey",
-          "Baird", "Baker", "Baldwin", "Ball", "Ballard", "Banks", "Barber",
-          "Smith", "Johnson", "Williams", "Jones", "Brown", "Davis", "Miller",
-          "Wilson", "Moore", "Taylor", "Thomas", "Jackson", "Barker", "Barlow",
-          "Barnes", "Barnett", "Barr", "Barrera", "Barrett", "Barron", "Barry",
-          "Bartlett", "Barton", "Bass", "Bates", "Battle", "Bauer", "Baxter",
-          "Beach", "Bean", "Beard", "Beasley", "Beck", "Becker", "Bell",
-          "Bender", "Benjamin", "Bennett", "Benson", "Bentley", "Benton",
-          "Berg", "Berger", "Bernard", "Berry", "Best", "Bird", "Bishop",
-          "Black", "Blackburn", "Blackwell", "Blair", "Blake", "Blanchard",
-          "Blankenship", "Blevins", "Bolton", "Bond", "Bonner", "Booker",
-          "Boone", "Booth", "Bowen", "Bowers", "Bowman", "Boyd", "Boyer",
-          "Boyle", "Bradford", "Bradley", "Bradshaw", "Brady", "Branch",
-          "Bray", "Brennan", "Brewer", "Bridges", "Briggs", "Bright", "Britt",
-          "Brock", "Brooks", "Browning", "Bruce", "Bryan", "Bryant",
-          "Buchanan", "Buck", "Buckley", "Buckner", "Bullock", "Burch",
-          "Burgess", "Burke", "Burks", "Burnett", "Burns", "Burris", "Burt",
-          "Burton", "Bush", "Butler", "Byers", "Byrd", "Cabrera", "Cain",
-          "Calderon", "Caldwell", "Calhoun", "Callahan", "Camacho", "Cameron",
-          "Campbell", "Campos", "Cannon", "Cantrell", "Cantu", "Cardenas",
-          "Carey", "Carlson", "Carney", "Carpenter", "Carr", "Carrillo",
-          "Carroll", "Carson", "Carter", "Carver", "Case", "Casey", "Cash",
-          "Castaneda", "Castillo", "Castro", "Cervantes", "Chambers", "Chan",
-          "Chandler", "Chaney", "Chang", "Chapman", "Charles", "Chase",
-          "Chavez", "Chen", "Cherry", "Christensen", "Christian", "Church",
-          "Clark", "Clarke", "Clay", "Clayton", "Clements", "Clemons",
-          "Cleveland", "Cline", "Cobb", "Cochran", "Coffey", "Cohen", "Cole",
-          "Coleman", "Collier", "Collins", "Colon", "Combs", "Compton",
-          "Conley", "Conner", "Conrad", "Contreras", "Conway", "Cook", "Cooke",
-          "Cooley", "Cooper", "Copeland", "Cortez", "Cote", "Cotton", "Cox",
-          "Craft", "Craig", "Crane", "Crawford", "Crosby", "Cross", "Cruz",
-          "Cummings", "Cunningham", "Curry", "Curtis", "Dale", "Dalton",
-          "Daniel", "Daniels", "Daugherty", "Davenport", "David", "Davidson",
-          "Dawson", "Day", "Dean", "Decker", "Dejesus", "Delacruz", "Delaney",
-          "Deleon", "Delgado", "Dennis", "Diaz", "Dickerson", "Dickinson",
-          "Dillard", "Dillon", "Dixon", "Dodson", "Dominguez", "Donaldson",
-          "Donovan", "Dorsey", "Dotson", "Douglas", "Downs", "Doyle", "Drake",
-          "Dudley", "Duffy", "Duke", "Duncan", "Dunlap", "Dunn", "Duran",
-          "Durham", "Dyer", "Eaton", "Edwards", "Elliott", "Ellis", "Ellison",
-          "Emerson", "England", "English", "Erickson", "Espinoza", "Estes",
-          "Estrada", "Evans", "Everett", "Ewing", "Farley", "Farmer",
-          "Farrell", "Faulkner", "Ferguson", "Fernandez", "Ferrell", "Fields",
-          "Figueroa", "Finch", "Finley", "Fischer", "Fisher", "Fitzgerald",
-          "Fitzpatrick", "Fleming", "Fletcher", "Flores", "Flowers", "Floyd",
-          "Flynn", "Foley", "Forbes", "Ford", "Foreman", "Foster", "Fowler",
-          "Fox", "Francis", "Franco", "Frank", "Franklin", "Franks", "Frazier",
-          "Frederick", "Freeman", "French", "Frost", "Fry", "Frye", "Fuentes",
-          "Fuller", "Fulton", "Gaines", "Gallagher", "Gallegos", "Galloway",
-          "Gamble", "Garcia", "Gardner", "Garner", "Garrett", "Garrison",
-          "Garza", "Gates", "Gay", "Gentry", "George", "Gibbs", "Gibson",
-          "Gilbert", "Giles", "Gill", "Gillespie", "Gilliam", "Gilmore",
-          "Glass", "Glenn", "Glover", "Goff", "Golden", "Gomez", "Gonzales",
-          "Gonzalez", "Good", "Goodman", "Goodwin", "Gordon", "Gould",
-          "Graham", "Grant", "Graves", "Gray", "Green", "Greene", "Greer",
-          "Gregory", "Griffin", "Griffith", "Grimes", "Gross", "Guerra",
-          "Guerrero", "Guthrie", "Gutierrez", "Guy", "Guzman", "Hahn", "Hale",
-          "Haley", "Hall", "Hamilton", "Hammond", "Hampton", "Hancock",
-          "Haney", "Hansen", "Hanson", "Hardin", "Harding", "Hardy", "Harmon",
-          "Harper", "Harris", "Harrington", "Harrison", "Hart", "Hartman",
-          "Harvey", "Hatfield", "Hawkins", "Hayden", "Hayes", "Haynes", "Hays",
-          "Head", "Heath", "Hebert", "Henderson", "Hendricks", "Hendrix",
-          "Henry", "Hensley", "Henson", "Herman", "Hernandez", "Herrera",
-          "Herring", "Hess", "Hester", "Hewitt", "Hickman", "Hicks", "Higgins",
-          "Hill", "Hines", "Hinton", "Hobbs", "Hodge", "Hodges", "Hoffman",
-          "Hogan", "Holcomb", "Holden", "Holder", "Holland", "Holloway",
-          "Holman", "Holmes", "Holt", "Hood", "Hooper", "Hoover", "Hopkins",
-          "Hopper", "Horn", "Horne", "Horton", "House", "Houston", "Howard",
-          "Howe", "Howell", "Hubbard", "Huber", "Hudson", "Huff", "Huffman",
-          "Hughes", "Hull", "Humphrey", "Hunt", "Hunter", "Hurley", "Hurst",
-          "Hutchinson", "Hyde", "Ingram", "Irwin", "Jacobs", "Jacobson",
-          "James", "Jarvis", "Jefferson", "Jenkins", "Jennings", "Jensen",
-          "Jimenez", "Johns", "Johnston", "Jordan", "Joseph", "Joyce",
-          "Joyner", "Juarez", "Justice", "Kane", "Kaufman", "Keith", "Keller",
-          "Kelley", "Kelly", "Kemp", "Kennedy", "Kent", "Kerr", "Key", "Kidd",
-          "Kim", "King", "Kinney", "Kirby", "Kirk", "Kirkland", "Klein",
-          "Kline", "Knapp", "Knight", "Knowles", "Knox", "Koch", "Kramer",
-          "Lamb", "Lambert", "Lancaster", "Landry", "Lane", "Lang", "Langley",
-          "Lara", "Larsen", "Larson", "Lawrence", "Lawson", "Le", "Leach",
-          "Leblanc", "Lee", "Leon", "Leonard", "Lester", "Levine", "Levy",
-          "Lewis", "Lindsay", "Lindsey", "Little", "Livingston", "Lloyd",
-          "Logan", "Long", "Lopez", "Lott", "Love", "Lowe", "Lowery", "Lucas",
-          "Luna", "Lynch", "Lynn", "Lyons", "Macdonald", "Macias", "Mack",
-          "Madden", "Maddox", "Maldonado", "Malone", "Mann", "Manning",
-          "Marks", "Marquez", "Marsh", "Marshall", "Martin", "Martinez",
-          "Mason", "Massey", "Mathews", "Mathis", "Matthews", "Maxwell", "May",
-          "Mayer", "Maynard", "Mayo", "Mays", "McBride", "McCall", "McCarthy",
-          "McCarty", "McClain", "McClure", "McConnell", "McCormick", "McCoy",
-          "McCray", "McCullough", "McDaniel", "McDonald", "McDowell",
-          "McFadden", "McFarland", "McGee", "McGowan", "McGuire", "McIntosh",
-          "McIntyre", "McKay", "McKee", "McKenzie", "McKinney", "McKnight",
-          "McLaughlin", "McLean", "McLeod", "McMahon", "McMillan", "McNeil",
-          "McPherson", "Meadows", "Medina", "Mejia", "Melendez", "Melton",
-          "Mendez", "Mendoza", "Mercado", "Mercer", "Merrill", "Merritt",
-          "Meyer", "Meyers", "Michael", "Middleton", "Miles", "Mills",
-          "Miranda", "Mitchell", "Molina", "Monroe", "Montgomery", "Montoya",
-          "Moody", "Moon", "Mooney", "Morales", "Moran", "Moreno", "Morgan",
-          "Morin", "Morris", "Morrison", "Morrow", "Morse", "Morton", "Moses",
-          "Mosley", "Moss", "Mueller", "Mullen", "Mullins", "Munoz", "Murphy",
-          "Murray", "Myers", "Nash", "Navarro", "Neal", "Nelson", "Newman",
-          "Newton", "Nguyen", "Nichols", "Nicholson", "Nielsen", "Nieves",
-          "Nixon", "Noble", "Noel", "Nolan", "Norman", "Norris", "Norton",
-          "Nunez", "Obrien", "Ochoa", "Oconnor", "Odom", "Odonnell", "Oliver",
-          "Olsen", "Olson", "O'neal", "O'neil", "O'neill", "Orr", "Ortega",
-          "Ortiz", "Osborn", "Osborne", "Owen", "Owens", "Pace", "Pacheco",
-          "Padilla", "Page", "Palmer", "Park", "Parker", "Parks", "Parrish",
-          "Parsons", "Pate", "Patel", "Patrick", "Patterson", "Patton", "Paul",
-          "Payne", "Pearson", "Peck", "Pena", "Pennington", "Perez", "Perkins",
-          "Perry", "Peters", "Petersen", "Peterson", "Petty", "Phelps",
-          "Phillips", "Pickett", "Pierce", "Pittman", "Pitts", "Pollard",
-          "Poole", "Pope", "Porter", "Potter", "Potts", "Powell", "Powers",
-          "Pratt", "Preston", "Price", "Prince", "Pruitt", "Puckett", "Pugh",
-          "Quinn", "Ramirez", "Ramos", "Ramsey", "Randall", "Randolph",
-          "Rasmussen", "Ratliff", "Ray", "Raymond", "Reed", "Reese", "Reeves",
-          "Reid", "Reilly", "Reyes", "Reynolds", "Rhodes", "Rice", "Rich",
-          "Richard", "Richards", "Richardson", "Richmond", "Riddle", "Riggs",
-          "Riley", "Rios", "Rivas", "Rivera", "Rivers", "Roach", "Robbins",
-          "Roberson", "Roberts", "Robertson", "Robinson", "Robles", "Rocha",
-          "Rodgers", "Rodriguez", "Rodriquez", "Rogers", "Rojas", "Rollins",
-          "Roman", "Romero", "Rosa", "Rosales", "Rosario", "Rose", "Ross",
-          "Roth", "Rowe", "Rowland", "Roy", "Ruiz", "Rush", "Russell", "Russo",
-          "Rutledge", "Ryan", "Salas", "Salazar", "Salinas", "Sampson",
-          "Sanchez", "Sanders", "Sandoval", "Sanford", "Santana", "Santiago",
-          "Santos", "Sargent", "Saunders", "Savage", "Sawyer", "Schmidt",
-          "Schneider", "Schroeder", "Schultz", "Schwartz", "Scott", "Sears",
-          "Sellers", "Serrano", "Sexton", "Shaffer", "Shannon", "Sharp",
-          "Sharpe", "Shaw", "Shelton", "Shepard", "Shepherd", "Sheppard",
-          "Sherman", "Shields", "Short", "Silva", "Simmons", "Simon",
-          "Simpson", "Sims", "Singleton", "Skinner", "Slater", "Sloan",
-          "Small", "Snider", "Snow", "Snyder", "Solis", "Solomon", "Sosa",
-          "Soto", "Sparks", "Spears", "Spence", "Spencer", "Stafford",
-          "Stanley", "Stanton", "Stark", "Steele", "Stein", "Stephens",
-          "Stephenson", "Stevens", "Stevenson", "Stewart", "Stokes", "Stone",
-          "Stout", "Strickland", "Strong", "Stuart", "Suarez", "Sullivan",
-          "Summers", "Sutton", "Swanson", "Sweeney", "Sweet", "Sykes",
-          "Talley", "Tanner", "Tate", "Terrell", "Terry", "Thompson",
-          "Thornton", "Tillman", "Todd", "Torres", "Townsend", "Tran",
-          "Travis", "Trevino", "Trujillo", "Tucker", "Turner", "Tyler",
-          "Tyson", "Underwood", "Valdez", "Valencia", "Valentine",
-          "Valenzuela", "Vance", "Vang", "Vargas", "Vasquez", "Vaughan",
-          "Vaughn", "Vazquez", "Vega", "Velasquez", "Velazquez", "Velez",
-          "Van halen", "Vincent", "Vinson", "Wade", "Wagner", "Walker", "Wall",
-          "Wallace", "Waller", "Walls", "Walsh", "Walter", "Walters", "Walton",
-          "Ward", "Ware", "Warner", "Warren", "Washington", "Waters",
-          "Watkins", "Watson", "Watts", "Weaver", "Webb", "Weber", "Webster",
-          "Weeks", "Weiss", "Welch", "Wells", "West", "Wheeler", "Whitaker",
-          "White", "Whitehead", "Whitfield", "Whitley", "Whitney", "Wiggins",
-          "Wilcox", "Wilder", "Wiley", "Wilkerson", "Wilkins", "Wilkinson",
-          "William", "Williamson", "Willis", "Winters", "Wise", "Witt", "Wolf",
-          "Wolfe", "Wong", "Wood", "Woodard", "Woods", "Woodward", "Wooten",
-          "Workman", "Wright", "Wyatt", "Wynn", "Yang", "Yates", "York",
-          "Young", "Zamora", "Zimmerman")
-}
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/scala/org/apache/bigtop/bigpetstore/generator/TransactionIteratorFactory.scala b/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/scala/org/apache/bigtop/bigpetstore/generator/TransactionIteratorFactory.scala
deleted file mode 100644
index 534c606..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/main/scala/org/apache/bigtop/bigpetstore/generator/TransactionIteratorFactory.scala
+++ /dev/null
@@ -1,106 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.generator;
-
-import java.util.Date
-import org.apache.bigtop.bigpetstore.generator.util.State
-import org.apache.commons.lang3.StringUtils
-import java.util.Arrays.asList
-import java.util.Random
-import scala.collection.Iterator
-import com.sun.org.apache.xml.internal.serializer.ToStream
-import java.util.{Iterator => JavaIterator}
-import scala.collection.JavaConversions.asJavaIterator
-import org.apache.bigtop.bigpetstore.generator.util.Product
-import org.apache.commons.lang3.Range;
-import org.apache.bigtop.bigpetstore.generator.util.ProductType
-
-/**
- * This class generates our data. Over time we will use it to embed bias which
- * can then be teased out, i.e. by clustering/classifiers. For example:
- *
- * certain products <--> certain years or days
- */
-class TransactionIteratorFactory(private val records: Int,
-        private val customerIdRange: Range[java.lang.Long],
-        private val state: State) {
-  assert(records > 0, "Number of records must be greater than 0 to generate a data iterator!")
-  private val random = new Random(state.hashCode)
-
-  def data: JavaIterator[TransactionIteratorFactory.KeyVal[String, String]] = {
-    new TransactionIteratorFactory.DataIterator(records, customerIdRange, state, random)
-  }
-}
-
-object TransactionIteratorFactory {
-  class KeyVal[K, V](val key: K, val value: V)
-
-  private class DataIterator(records: Int,
-          customerIdRange: Range[java.lang.Long],
-          state: State,
-          r: Random) extends Iterator[KeyVal[String, String]] {
-    private var firstName: String = null
-    private var lastName: String = null
-    private var elementsProcducedCount = 0
-    private var repeatCount = 0
-    private var currentCustomerId = customerIdRange.getMinimum
-    private var currentProductType = selectRandomProductType;
-
-    def hasNext =
-      elementsProcducedCount < records && currentCustomerId <= customerIdRange.getMaximum
-
-
-    def next(): TransactionIteratorFactory.KeyVal[String,String] = {
-      val date = DataForger.randomDateInPastYears(50);
-      setIteratorState();
-
-      val product = randomProductOfCurrentlySelectedType
-      val key = StringUtils.join(asList("BigPetStore", "storeCode_" + state.name(),
-              elementsProcducedCount.toString), ",")
-      val value = StringUtils.join(asList(currentCustomerId, firstName, lastName, product.id,
-              product.name.toLowerCase, product.price, date), ",")
-
-      elementsProcducedCount += 1
-      new TransactionIteratorFactory.KeyVal(key, value)
-    }
-
-    private def setIteratorState() = {
-      /** Some customers come back for more :) We repeat a customer up to ten times */
-      if (repeatCount > 0) {
-        repeatCount -= 1
-      } else {
-        firstName = DataForger.firstName(r)
-        lastName = DataForger.lastName(r)
-        // this sometimes generates numbers much larger than 10. We don't really need Gaussian
-        // distribution since number of transactions per customer can be truly arbitrary.
-        repeatCount = (r.nextGaussian * 4f) toInt;
-        println("####Repeat: " + repeatCount)
-        currentCustomerId += 1
-        currentProductType = selectRandomProductType;
-      }
-    }
-
-    private def selectRandomProductType = {
-      ProductType.values.apply(r.nextInt(ProductType.values.length))
-    }
-
-    private def randomProductOfCurrentlySelectedType = {
-      currentProductType.getProducts.get(r.nextInt(currentProductType.getProducts.size))
-    }
-  }
-}
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/test/java/org/apache/bigtop/bigpetstore/docs/TestDocs.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/test/java/org/apache/bigtop/bigpetstore/docs/TestDocs.java
deleted file mode 100644
index 8d7bf99..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/test/java/org/apache/bigtop/bigpetstore/docs/TestDocs.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.docs;
-
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-
-import org.apache.bigtop.bigpetstore.util.BigPetStoreConstants.OUTPUTS;
-import org.apache.commons.io.FileUtils;
-import org.junit.Test;
-
-public class TestDocs {
-
-	@Test
-	public void testGraphViz() throws Exception {
-		// test the graphviz file by grepping out the constants.
-		String graphviz = FileUtils.readFileToString(new File("arch.dot"));
-		System.out.println(graphviz);
-
-		assertTrue(graphviz.contains(OUTPUTS.generated.name()));
-		assertTrue(graphviz.contains(OUTPUTS.cleaned.name()));
-	}
-}
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/test/java/org/apache/bigtop/bigpetstore/generator/TestNumericalIdUtils.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/test/java/org/apache/bigtop/bigpetstore/generator/TestNumericalIdUtils.java
deleted file mode 100644
index e2f1f25..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/test/java/org/apache/bigtop/bigpetstore/generator/TestNumericalIdUtils.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.generator;
-
-import static org.junit.Assert.assertFalse;
-
-import org.apache.bigtop.bigpetstore.generator.util.State;
-import org.apache.bigtop.bigpetstore.util.NumericalIdUtils;
-import org.junit.Test;
-
-public class TestNumericalIdUtils {
-
-    @Test
-    public void testName() {
-        String strId= State.OK.name()+"_"+ "jay vyas";
-        long id = NumericalIdUtils.toId(strId);
-        String strId2= State.CO.name()+"_"+ "jay vyas";
-        long id2 = NumericalIdUtils.toId(strId2);
-        System.out.println(id + " " + id2);
-        assertFalse(id==id2);
-    }
-}
\ No newline at end of file
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/test/java/org/apache/bigtop/bigpetstore/generator/TestPetStoreTransactionGeneratorJob.java b/bigtop-bigpetstore/bigpetstore-mapreduce/src/test/java/org/apache/bigtop/bigpetstore/generator/TestPetStoreTransactionGeneratorJob.java
deleted file mode 100755
index dc23562..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/test/java/org/apache/bigtop/bigpetstore/generator/TestPetStoreTransactionGeneratorJob.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore.generator;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.io.BufferedReader;
-import java.io.DataInputStream;
-import java.io.InputStreamReader;
-import java.util.Date;
-
-import org.apache.bigtop.bigpetstore.generator.BPSGenerator.props;
-import org.apache.bigtop.bigpetstore.generator.util.State;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.Job;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * run this test with vm options -Xms512m -Xmx1024m
- *
- */
-public class TestPetStoreTransactionGeneratorJob {
-
-    final static Logger log = LoggerFactory
-            .getLogger(TestPetStoreTransactionGeneratorJob.class);
-
-    @Test
-    public void test() throws Exception {
-        System.out.println("memory : " + Runtime.getRuntime().freeMemory()
-                / 1000000);
-        if (Runtime.getRuntime().freeMemory() / 1000000 < 75) {
-            // throw new
-            // RuntimeException("need more memory to run this test !");
-        }
-        int records = 20;
-        /**
-         * Setup configuration with prop.
-         */
-        Configuration c = new Configuration();
-        c.setInt(props.bigpetstore_records.name(), records);
-
-        /**
-         * Run the job
-         */
-        Path output = new Path("petstoredata/" + (new Date()).toString());
-        Job createInput = BPSGenerator.getCreateTransactionRecordsJob(output, c);
-        createInput.submit();
-        System.out.println(createInput);
-        createInput.waitForCompletion(true);
-
-        FileSystem fs = FileSystem.getLocal(new Configuration());
-
-        /**
-         * Read file output into string.
-         */
-        DataInputStream f = fs.open(new Path(output, "part-r-00000"));
-        BufferedReader br = new BufferedReader(new InputStreamReader(f));
-        String s;
-        int recordsSeen = 0;
-        boolean CTseen = false;
-        boolean AZseen = false;
-
-        // confirm that both CT and AZ are seen in the outputs.
-        while (br.ready()) {
-            s = br.readLine();
-            System.out.println("===>" + s);
-            recordsSeen++;
-            if (s.contains(State.CT.name())) {
-                CTseen = true;
-            }
-            if (s.contains(State.AZ.name())) {
-                AZseen = true;
-            }
-        }
-
-        // records seen should = 20
-        assertEquals(records, recordsSeen);
-        // Assert that a couple of the states are seen (todo make it
-        // comprehensive for all states).
-        assertTrue(CTseen);
-        assertTrue(AZseen);
-        log.info("Created " + records + " , file was "
-                + fs.getFileStatus(new Path(output, "part-r-00000")).getLen()
-                + " bytes.");
-    }
-}
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/test/resources/log4j.properties b/bigtop-bigpetstore/bigpetstore-mapreduce/src/test/resources/log4j.properties
deleted file mode 100644
index 1e33093..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,47 +0,0 @@
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-hadoop.root.logger=INFO,console
-hadoop.log.dir=.
-hadoop.log.file=hadoop.log
-
-#
-# Job Summary Appender
-#
-# Use following logger to send summary to separate file defined by
-# hadoop.mapreduce.jobsummary.log.file rolled daily:
-# hadoop.mapreduce.jobsummary.logger=INFO,JSA
-#
-hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
-hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.EventCounter=org.apache.log4j.ConsoleAppender
-log4j.appender.EventCounter.layout=org.apache.log4j.PatternLayout
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hadoop.root.logger}, EventCounter
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
diff --git a/bigtop-bigpetstore/bigpetstore-mapreduce/src/test/scala/org/apache/bigtop/bigpetstore/ScalaTestSample.scala b/bigtop-bigpetstore/bigpetstore-mapreduce/src/test/scala/org/apache/bigtop/bigpetstore/ScalaTestSample.scala
deleted file mode 100644
index a393b4b..0000000
--- a/bigtop-bigpetstore/bigpetstore-mapreduce/src/test/scala/org/apache/bigtop/bigpetstore/ScalaTestSample.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.bigpetstore
-
-import org.junit.Test
-import org.junit.runner.RunWith
-import org.scalatest.junit.JUnitRunner
-import org.scalatest._
-import scala.collection.mutable.Stack
-
-@RunWith(classOf[JUnitRunner])
-class ScalaTestSample extends FlatSpec with Matchers {
-	"This test" should "show an example of what we can do with the scala-test library" in {
-		val stack = new Stack[Int]
-		stack.push(1)
-		stack.push(2)
-		stack.pop() should be(2)
-		stack.pop() should be(1)
-	}
-}
diff --git a/bigtop-bigpetstore/bigpetstore-spark/README.md b/bigtop-bigpetstore/bigpetstore-spark/README.md
deleted file mode 100644
index 8817186..0000000
--- a/bigtop-bigpetstore/bigpetstore-spark/README.md
+++ /dev/null
@@ -1,175 +0,0 @@
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-BigPetStore -- Spark
-====================
-
-BigPetStore is a family of example applications for the Hadoop and Spark
-ecosystems.  BigPetStore is build around a fictional chain pet stores,
-providing generators for synthetic transaction data and pipelines for
-processing that data.  Each ecosystems has its own version of the
-application.
-
-The Spark application currently builds against Spark 1.3.0.
-
-Architecture
-------------
-The Spark application consists of the following modules so far:
-
-* generator: generates raw data on the dfs
-* datamodel: data model used as input for analytics components
-* etl: normalizes and transforms the raw data to the data model
-
-Data Model
-----------
-
-The data generator creates a dirty CSV file containing the following fields:
-
-* Store ID: Int
-* Store Zipcode: String
-* Store City: String
-* Store State: String
-* Customer ID: Int
-* Customer First Name: String
-* Customer Last Name: String
-* Customer Zipcode: String
-* Customer City: String
-* Customer State: String
-* Transaction ID: Int
-* Transation Date Time: String (e.g., "Tue Nov 03 01:08:11 EST 2014")
-* Transaction Product: String (e.g., "category=dry cat food;brand=Feisty Feline;flavor=Chicken & Rice;size=14.0;per_unit_cost=2.14;")
-
-Note that the transaction ID is unique only per customer -- the customer and transaction IDs form a unique composite key.
-
-Since the dirty CSV data contains repetitive information and requires massaging to use for analytics, an
-internal structured data model is defined as input for the analytics components:
-
-* Location(zipcode: String, city: String, state: String)
-* Customer(customerId: Long, firstName: String, lastName: String, zipcode: String)
-* Store(storeId: Long, zipcode: String)
-* Product(productId: Long, category: String, attributes: Map[String, String])
-* Transaction(customerId: Long, transactionId: Long, storeId: Long, dateTime: java.util.Calendar, productId: Long)
-
-The ETL stage parses and cleans up the dirty CSV and writes out RDDs for each data type in the data model, serialized using
-the `saveAsObjectFile()` method.  The analytics components can use the `IOUtils.load()` method to de-serialize the structured
-data.
-
-Running Tests
--------------
-BigPetStore Spark includes unit tests that you can run with the following command:
-
-```
-gradle clean test
-```
-
-Building and Running with Spark
--------------------------------
-BigPetStore has a Spark driver for generating data with the new data generator.
-Build a fat jar as follows:
-
-```
-gradle clean shadowJar
-```
-
-This will produce a jar file under `build/libs` (referred to as `bigpetstore-spark-X.jar`).  You can then
-use this jar to run a Spark job as follows:
-
-```
-spark-submit --master local[2] --class org.apache.bigtop.bigpetstore.spark.generator.SparkDriver bigpetstore-spark-X.jar generated_data/ 10 1000 365.0 345
-```
-
-You will need to change the master if you want to run on a cluster.  The last five parameters control the output directory,
-the number of stores, the number of customers, simulation length (in days), and the random seed (which is optional).
-
-
-Running the ETL component
--------------------------
-The data produced by the generator is in a raw text format, similar to what users will see in production environments.
-The raw data isn't normalized (e.g., repeated customer, store, location, and product information) and needs to be parsed
-(e.g., dates) before it can be easily used.  The ETL component does this for us.
-
-The ETL component:
-
-* Reads the raw data
-* Parses the data times and products
-* Normalizes the data
-* Writes out RDDs for each type of class (Store, Customer, Location, Product, Transaction) in the data model
-
-After building the jar (see above), you can run the ETL component like so:
-
-```
-spark-submit --master local[2] --class org.apache.bigtop.bigpetstore.spark.etl.SparkETL bigpetstore-spark-X.jar generated_data transformed_data
-```
-
-Running the SparkSQL component
--------------------------------
-
-Once ETL'd we can now process the data and do analytics on it.  The DataModel.scala class itself is used to read/write classes
-from files.  To run the analytics job, which outputs a JSON file at the end, you now will run the following:
-
-```
-spark-submit --master local[2] --class org.apache.bigtop.bigpetstore.spark.analytics.PetStoreStatistics bigpetstore-spark-X.jar transformed_data PetStoreStats.json
-```
-
-Current queries include:
-
-1. Total Transactions
-2. Transaction Counts by Month
-3. Transaction Counts by Product
-4. Transaction Counts by Product and Store Zipcode
-
-This will output a JSON file to the current directory, which has formatting (approximately) like this.
-
-```
-{
-   "totalTransaction":34586,
-   "transactionsByZip":[
-  {"count":64,"productId":54,"zipcode":"94583"},{"count":38,"productId":18,"zipcode":"34761"},
-   {"count":158,"productId":14,"zipcode":"11368"},{"count":66,"productId":46,"zipcode":"33027"},
-   {"count":52,"productId":27,"zipcode":"94583"},{"count":84,"productId":19,"zipcode":"33027"},
-   {"count":143,"productId":0,"zipcode":"94583"},{"count":58,"productId":41,"zipcode":"72715"},
-   {"count":76,"productId":54,"zipcode":"15014"},{"count":118,"productId":52,"zipcode":"45439"}},
-     ..... (several more) ....
-   "productDetails":[
-      {
-         "productId":0,
-         "category":"kitty litter",
-         "attributes":{
-            "category":"kitty litter",
-            "brand":"Pretty Cat",
-            "size":"7.0",
-            "per_unit_cost":"1.43"
-         }
-      },
-      {
-         "productId":2,
-         "category":"dry cat food",
-         "attributes":{
-```
-
-Of course, the above data is for a front end web app which will display charts/summary stats of the transactions.
-Keep tracking Apache BigTop for updates on this front !
-
-Running the Product Recommendation Component
---------------------------------------------
-
-BigPetStore can recommend products to customers using the alternating least squares (ALS) algorithm. The recommender can be run as follows:
-
-```
-spark-submit --master local[2] --class org.apache.bigtop.bigpetstore.spark.analytics.RecommendProducts bigpetstore-spark-X.jar transformed_data recommendations.json
-```
-
-The resulting json file will contain lists of customers, products, and products recommended to each customer.
diff --git a/bigtop-bigpetstore/bigpetstore-spark/arch.dot b/bigtop-bigpetstore/bigpetstore-spark/arch.dot
deleted file mode 100644
index 6cd3d1a..0000000
--- a/bigtop-bigpetstore/bigpetstore-spark/arch.dot
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
-* Licensed to the Apache Software Foundation (ASF) under one or more
-* contributor license agreements.  See the NOTICE file distributed with
-* this work for additional information regarding copyright ownership.
-* The ASF licenses this file to You under the Apache License, Version 2.0
-* (the "License"); you may not use this file except in compliance with
-* the License.  You may obtain a copy of the License at
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-digraph bigpetstore {
-
-   node [shape=record];
-
-
-   DIRTY_CSV [label="Raw, dirty data in CSV format"];
-   STRUCTURED_DATA [label="Data model serialized as sequence files"];
-   generator [label="Data Generator (generator.SparkDriver)"];
-   ETL [label="Extract-Transform-Load (etl.SparkETL)"];
-   SalesAnalytics [label="Sales Analytics (analytics.PetStoreStatistics)"];
-   SalesTables [label="Sales Trends Tables (JSON)"];
-   ItemRecommender [label="Item Recommender (analytics.RecommendProducts)"];
-   ItemRecommendations [label="Customer Product Recommendations (JSON)"];
-
-   generator -> DIRTY_CSV -> ETL -> STRUCTURED_DATA;
-   STRUCTURED_DATA -> SalesAnalytics -> SalesTables;
-   STRUCTURED_DATA -> ItemRecommender -> ItemRecommendations;
-}
diff --git a/bigtop-bigpetstore/bigpetstore-spark/build.gradle b/bigtop-bigpetstore/bigpetstore-spark/build.gradle
deleted file mode 100644
index 6d5a957..0000000
--- a/bigtop-bigpetstore/bigpetstore-spark/build.gradle
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-apply plugin: "java"
-apply plugin: "eclipse"
-// TODO add idea module config.
-apply plugin: "idea"
-apply plugin: "scala"
-apply plugin: 'com.github.johnrengelman.shadow'
-
-buildscript {
-  repositories { jcenter() }
-  dependencies {
-    classpath 'com.github.jengelman.gradle.plugins:shadow:1.0.2'
-  }
-}
-
-
-// Read the groupId and version properties from the "parent" bigtop project.
-// It would be better if there was some better way of doing this. Howvever,
-// at this point, we have to do this (or some variation thereof) since gradle
-// projects can't have maven projects as parents (AFAIK. If there is a way to do it,
-// it doesn't seem to be well-documented).
-def setProjectProperties() {
-    Node xml = new XmlParser().parse("../../pom.xml")
-    group = xml.groupId.first().value().first()
-    version = xml.version.first().value().first()
-}
-
-setProjectProperties()
-description = """"""
-
-// We are using 1.7 as gradle can't play well when java 8 and scala are combined.
-// There is an open issue here: http://issues.gradle.org/browse/GRADLE-3023
-// There is talk of this being resolved in the next version of gradle. Till then,
-// we are stuck with java 7. But we do have scala if we want more syntactic sugar.
-sourceCompatibility = 1.7
-targetCompatibility = 1.7
-
-// Specify any additional project properties.
-ext {
-    sparkVersion = "1.3.0"
-    scalaVersion = "2.10"
-}
-
-shadowJar {
-    zip64 true
-}
-
-repositories {
-    mavenCentral()
-    maven {
-        url "http://dl.bintray.com/rnowling/bigpetstore"
-    }
-}
-
-tasks.withType(AbstractCompile) {
-    options.encoding = 'UTF-8'
-    options.compilerArgs << "-Xlint:all"
-}
-
-tasks.withType(ScalaCompile) {
-    // Enables incremental compilation.
-    // http://www.gradle.org/docs/current/userguide/userguide_single.html#N12F78
-    scalaCompileOptions.useAnt = false
-}
-
-tasks.withType(Test) {
-    testLogging {
-        // Uncomment this if you want to see the console output from the tests.
-        // showStandardStreams = true
-        events "passed", "skipped", "failed"
-        // show standard out and standard error of the test JVM(s) on the console
-        //showStandardStreams = true
-    }
-}
-
-// Create a separate source-set for the src/integrationTest set of classes. The convention here
-// is that gradle will look for a directory with the same name as that of the specified source-set
-// under the 'src' directory.
-sourceSets {
-    main {
-        java.srcDirs = [];
-        scala.srcDirs = ["src/main/scala", "src/main/java"]
-    }
-}
-
-
-// To see the API that is being used here, consult the following docs
-// http://www.gradle.org/docs/current/dsl/org.gradle.api.artifacts.ResolutionStrategy.html
-def updateDependencyVersion(dependencyDetails, dependencyString) {
-    def parts = dependencyString.split(':')
-    def group = parts[0]
-    def name = parts[1]
-    def version = parts[2]
-    if (dependencyDetails.requested.group == group
-            && dependencyDetails.requested.name == name) {
-        dependencyDetails.useVersion version
-    }
-}
-
-
-dependencies {
-    compile "org.apache.spark:spark-core_${scalaVersion}:${sparkVersion}"
-    compile "org.apache.spark:spark-mllib_${scalaVersion}:${sparkVersion}"
-    compile "org.apache.spark:spark-network-shuffle_${scalaVersion}:${sparkVersion}"
-    compile "org.apache.spark:spark-sql_${scalaVersion}:${sparkVersion}"
-    compile "org.apache.spark:spark-graphx_${scalaVersion}:${sparkVersion}"
-    compile "org.apache.spark:spark-hive_${scalaVersion}:${sparkVersion}"
-    compile "com.github.rnowling.bigpetstore:bigpetstore-data-generator:0.2.1"
-    compile "joda-time:joda-time:2.7"
-    compile "org.json4s:json4s-jackson_2.10:3.1.0"
-
-    testCompile "junit:junit:4.11"
-    testCompile "org.hamcrest:hamcrest-all:1.3"
-    testCompile "org.scalatest:scalatest_${scalaVersion}:2.2.1"
-    testCompile "joda-time:joda-time:2.7"
-}
-
-task listJars << {
-    configurations.shadow.each { println it.name }
-}
-
-
-eclipse {
-    classpath {
-        // Comment out the following two lines if you want to generate an eclipse project quickly.
-        downloadSources = true
-        downloadJavadoc = false
-    }
-}
diff --git a/bigtop-bigpetstore/bigpetstore-spark/src/main/scala/org/apache/bigpetstore/spark/analytics/PetStoreStatistics.scala b/bigtop-bigpetstore/bigpetstore-spark/src/main/scala/org/apache/bigpetstore/spark/analytics/PetStoreStatistics.scala
deleted file mode 100644
index e7e5b08..0000000
--- a/bigtop-bigpetstore/bigpetstore-spark/src/main/scala/org/apache/bigpetstore/spark/analytics/PetStoreStatistics.scala
+++ /dev/null
@@ -1,182 +0,0 @@
-/*
-*  Licensed to the Apache Software Foundation (ASF) under one or more
-*  contributor license agreements.  See the NOTICE file distributed with
-*  this work for additional information regarding copyright ownership.
-*  The ASF licenses this file to You under the Apache License, Version 2.0
-*  (the "License"); you may not use this file except in compliance with
-*  the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-*  Unless required by applicable law or agreed to in writing, software
-*  distributed under the License is distributed on an "AS IS" BASIS,
-*  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-*  See the License for the specific language governing permissions and
-*  limitations under the License.
-*/
-
-package org.apache.bigtop.bigpetstore.spark.analytics
-
-import java.io.File
-import java.sql.Timestamp
-
-import scala.Nothing
-
-import org.apache.spark.sql._
-import org.apache.spark.{SparkContext, SparkConf}
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd._
-
-import org.joda.time.DateTime
-import org.json4s.JsonDSL.WithBigDecimal._
-
-import org.apache.bigtop.bigpetstore.spark.datamodel._
-
-object PetStoreStatistics {
-
-    private def printUsage() {
-      val usage: String = "BigPetStore Analytics Module." +
-      "\n" +
-      "Usage: spark-submit ... inputDir outputFile\n " +
-      "inputDir - (string) Path to ETL'd data\n" +
-      "outputFile - (string) is a JSON file.  For schema, see the code.\n"
-
-      System.err.println(usage)
-    }
-
-  /**
-   * Scala details. Some or None are an idiomatic way, in scala, to
-   * return an optional value.  This allows us to signify, to the caller, that the
-   * method may fail.  The caller can decide how to deal with failure (i.e. using getOrElse).
-   * @param args
-   * @return
-   */
-    def parseArgs(args: Array[String]):(Option[String],Option[String]) = {
-      if(args.length < 1) {
-        (None, None)
-      } else if (args.length == 1) {
-        (Some(args(0)), None)
-      } else {
-        (Some(args(0)), Some(args(1)))
-      }
-    }
-
-  def productMap(r:Array[Product]) : Map[Long,Product] = {
-    r map (prod => prod.productId -> prod) toMap
-  }
-
-  def queryTxByMonth(sqlContext: SQLContext): Array[StatisticsTxByMonth] = {
-    import sqlContext._
-
-    val results: DataFrame = sql("SELECT count(*), month FROM Transactions GROUP BY month")
-    val transactionsByMonth = results.collect()
-    for(x<-transactionsByMonth){
-      println(x)
-    }
-
-    transactionsByMonth.map { r =>
-      StatisticsTxByMonth(r.getInt(1), r.getLong(0))
-    }
-  }
-
-  def queryTxByProductZip(sqlContext: SQLContext): Array[StatisticsTxByProductZip] = {
-    import sqlContext._
-
-    val results: DataFrame = sql(
-      """SELECT count(*) c, productId, zipcode
-FROM Transactions t
-JOIN Stores s ON t.storeId = s.storeId
-GROUP BY productId, zipcode""")
-
-    val groupedProductZips = results.collect()
-
-    //get list of all transactionsData
-    for(x<-groupedProductZips){
-      println("grouped product:zip " + x)
-    }
-
-    //Map JDBC Row into a Serializable case class.
-    groupedProductZips.map { r =>
-      StatisticsTxByProductZip(r.getLong(1),r.getString(2),r.getLong(0))
-    }
-  }
-
-  def queryTxByProduct(sqlContext: SQLContext): Array[StatisticsTxByProduct] = {
-    import sqlContext._
-
-    val results: DataFrame = sql(
-      """SELECT count(*) c, productId FROM Transactions GROUP BY productId""")
-
-    val groupedProducts = results.collect()
-
-    //Map JDBC Row into a Serializable case class.
-    groupedProducts.map { r =>
-      StatisticsTxByProduct(r.getLong(1),r.getLong(0))
-    }
-  }
-
-
-  def runQueries(r:(RDD[Location], RDD[Store], RDD[Customer], RDD[Product],
-    RDD[Transaction]), sc: SparkContext): Statistics = {
-
-    val sqlContext = new org.apache.spark.sql.SQLContext(sc)
-    import sqlContext._
-    import sqlContext.implicits._
-
-    // Transform the Non-SparkSQL Calendar into a SparkSQL-friendly field.
-    val mappableTransactions:RDD[TransactionSQL] =
-      r._5.map { trans => trans.toSQL() }
-
-    r._1.toDF().registerTempTable("Locations")
-    r._2.toDF().registerTempTable("Stores")
-    r._3.toDF().registerTempTable("Customers")
-    r._4.toDF().registerTempTable("Product")
-    mappableTransactions.toDF().registerTempTable("Transactions")
-
-
-    val txByMonth = queryTxByMonth(sqlContext)
-    val txByProduct = queryTxByProduct(sqlContext)
-    val txByProductZip = queryTxByProductZip(sqlContext)
-
-    return Statistics(
-      txByMonth.map { s => s.count }.reduce(_+_),  // Total number of transactions
-      txByMonth,
-      txByProduct,
-      txByProductZip,
-      r._4.collect()) // Product details
-  }
-
-    /**
... 522691 lines suppressed ...