You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by aonishuk@apache.org on 2017/10/19 12:54:47 UTC

[01/50] [abbrv] ambari git commit: AMBARI-22147 Move stacks/HDP/3.0/kerberos.json to stacks/kerberos.json (dsen)

Repository: ambari
Updated Branches:
  refs/heads/branch-3.0-perf 7da02bb9f -> af30ab409


AMBARI-22147 Move stacks/HDP/3.0/kerberos.json to stacks/kerberos.json (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c3f6ba76
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c3f6ba76
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c3f6ba76

Branch: refs/heads/branch-3.0-perf
Commit: c3f6ba7621963b1086b7d9754d49b8663c7892b5
Parents: 3acfa5c
Author: Dmytro Sen <dsen@apache.org>
Authored: Thu Oct 12 13:52:31 2017 +0300
Committer: Dmytro Sen <dsen@apache.org>
Committed: Thu Oct 12 13:52:31 2017 +0300

----------------------------------------------------------------------
 ambari-server/src/main/assemblies/server.xml | 5 +++++
 1 file changed, 5 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c3f6ba76/ambari-server/src/main/assemblies/server.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/assemblies/server.xml b/ambari-server/src/main/assemblies/server.xml
index 3079d1b..37283fa 100644
--- a/ambari-server/src/main/assemblies/server.xml
+++ b/ambari-server/src/main/assemblies/server.xml
@@ -392,6 +392,11 @@
       <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
     </file>
     <file>
+      <fileMode>755</fileMode>
+      <source>src/main/resources/kerberos.json</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
       <fileMode>644</fileMode>
       <source>src/main/resources/slider_resources/README.txt</source>
       <outputDirectory>/var/lib/ambari-server/resources/apps</outputDirectory>


[50/50] [abbrv] ambari git commit: Merge remote-tracking branch 'remotes/origin/trunk' into branch-3.0-perf

Posted by aonishuk@apache.org.
Merge remote-tracking branch 'remotes/origin/trunk' into branch-3.0-perf


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/af30ab40
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/af30ab40
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/af30ab40

Branch: refs/heads/branch-3.0-perf
Commit: af30ab40972022ce4bff6a32b4ebdd29a33ef95d
Parents: 7da02bb b4eddc9
Author: Andrew Onishuk <aonishuk@hortonworks.com>
Authored: Thu Oct 19 15:54:09 2017 +0300
Committer: Andrew Onishuk <aonishuk@hortonworks.com>
Committed: Thu Oct 19 15:54:09 2017 +0300

----------------------------------------------------------------------
 .../ui/admin-web/app/scripts/i18n.config.js     |   2 +-
 ambari-agent/pom.xml                            |   4 +-
 .../main/python/ambari_agent/AmbariConfig.py    |  74 +-
 .../ambari_agent/CustomServiceOrchestrator.py   |   6 +-
 .../src/main/python/ambari_agent/FileCache.py   |   5 +-
 .../main/python/ambari_agent/ProcessHelper.py   |  71 --
 .../src/main/python/ambari_agent/StatusCheck.py | 142 ----
 .../src/main/python/ambari_agent/main.py        |  12 +-
 .../test/python/ambari_agent/TestFileCache.py   |   4 +-
 .../test/python/ambari_agent/TestLiveStatus.py  |   5 +-
 .../src/test/python/ambari_agent/TestMain.py    |  13 +-
 .../python/ambari_agent/TestProcessHelper.py    |  70 --
 .../test/python/ambari_agent/TestSecurity.py    |   1 -
 .../test/python/ambari_agent/TestStatusCheck.py | 180 -----
 .../resource_management/TestPackageResource.py  |   6 +-
 .../python/resource_management/TestScript.py    |  26 +-
 .../core/providers/package/__init__.py          |   4 +-
 .../core/providers/package/apt.py               |  35 +-
 .../core/providers/package/choco.py             |   8 +-
 .../core/providers/package/yumrpm.py            |  10 +-
 .../core/providers/package/zypper.py            |   7 +-
 .../core/resources/packaging.py                 |  14 +-
 .../libraries/functions/component_version.py    |  26 +-
 .../libraries/functions/conf_select.py          | 362 +++------
 .../libraries/functions/repository_util.py      |   8 +-
 .../libraries/functions/stack_select.py         |  69 +-
 .../libraries/script/script.py                  |  88 ++-
 ambari-infra/ambari-infra-assembly/pom.xml      |  12 +
 .../src/main/package/deb/manager/postinst       |   5 +
 .../src/main/package/deb/solr-client/postinst   |  13 +
 .../src/main/package/rpm/manager/postinstall.sh |  20 +
 .../main/package/rpm/solr-client/postinstall.sh |  28 +
 .../org/apache/ambari/infra/InfraManager.java   |   4 +-
 .../src/main/resources/infraManager.sh          |  10 +-
 ambari-infra/ambari-infra-solr-client/build.xml |   3 +
 .../src/main/python/solrDataManager.py          |   0
 .../src/main/resources/solrIndexHelper.sh       |   5 +-
 .../ambari-logsearch-assembly/pom.xml           |   4 +-
 .../ambari-logsearch-logfeeder/pom.xml          |   4 +
 .../ambari-logsearch-server/pom.xml             |   4 +
 .../ambari-logsearch-web/package.json           |  19 +-
 ambari-logsearch/ambari-logsearch-web/pom.xml   |  20 +-
 .../ambari-logsearch-web/src/app/app.module.ts  |   8 +
 .../src/app/classes/active-service-log-entry.ts |  23 +
 .../src/app/classes/histogram-options.ts        |  36 +
 .../src/app/classes/list-item.class.ts          |  25 -
 .../src/app/classes/list-item.ts                |  26 +
 .../src/app/classes/models/app-settings.ts      |  27 +
 .../src/app/classes/models/app-state.ts         |  43 +
 .../src/app/classes/models/audit-log-field.ts   | 225 ++++++
 .../src/app/classes/models/audit-log.ts         |  46 ++
 .../src/app/classes/models/bar-graph.ts         |  24 +
 .../src/app/classes/models/common-entry.ts      |  22 +
 .../src/app/classes/models/count.ts             |  22 +
 .../src/app/classes/models/filter.ts            |  25 +
 .../src/app/classes/models/graph.ts             |  23 +
 .../src/app/classes/models/log-field.ts         |  27 +
 .../src/app/classes/models/log.ts               |  38 +
 .../src/app/classes/models/node.ts              |  30 +
 .../src/app/classes/models/service-log-field.ts | 107 +++
 .../src/app/classes/models/service-log.ts       |  27 +
 .../app/classes/models/solr-collection-state.ts |  23 +
 .../src/app/classes/models/store.ts             | 180 +++++
 .../src/app/classes/models/user-config.ts       |  26 +
 .../queries/audit-logs-query-params.class.ts    |  46 --
 .../classes/queries/audit-logs-query-params.ts  |  46 ++
 .../app/classes/queries/query-params.class.ts   |  23 -
 .../src/app/classes/queries/query-params.ts     |  23 +
 ...ce-logs-histogram-query-params.class.spec.ts | 203 -----
 ...service-logs-histogram-query-params.class.ts |  70 --
 .../service-logs-histogram-query-params.spec.ts | 203 +++++
 .../service-logs-histogram-query-params.ts      |  70 ++
 .../queries/service-logs-query-params.class.ts  |  30 -
 .../queries/service-logs-query-params.ts        |  30 +
 .../service-logs-truncated-query-params.ts      |  36 +
 .../app/classes/service-log-context-entry.ts    |  26 +
 .../accordion-panel.component.less              |   2 +-
 .../src/app/components/app.component.less       |   2 +-
 .../collapsible-panel.component.html            |  27 +
 .../collapsible-panel.component.less            |  42 +
 .../collapsible-panel.component.spec.ts         | 129 +++
 .../collapsible-panel.component.ts              |  89 +++
 .../dropdown-button.component.html              |   3 +-
 .../dropdown-button.component.less              |   2 +-
 .../dropdown-button.component.spec.ts           |   5 +-
 .../dropdown-button.component.ts                |   2 +-
 .../dropdown-list/dropdown-list.component.html  |   2 +-
 .../dropdown-list/dropdown-list.component.less  |   2 +-
 .../dropdown-list.component.spec.ts             |  12 +-
 .../dropdown-list/dropdown-list.component.ts    |  11 +-
 .../filter-button.component.spec.ts             |   5 +-
 .../filter-button/filter-button.component.ts    |   2 +-
 .../filter-dropdown.component.spec.ts           |   5 +-
 .../filters-panel/filters-panel.component.html  |   5 +-
 .../filters-panel/filters-panel.component.less  |   8 +-
 .../filters-panel.component.spec.ts             |   5 +-
 .../filters-panel/filters-panel.component.ts    |   4 +-
 .../log-context/log-context.component.html      |  33 +
 .../log-context/log-context.component.less      |  23 +
 .../log-context/log-context.component.spec.ts   | 108 +++
 .../log-context/log-context.component.ts        |  91 +++
 .../log-file-entry.component.html               |  20 +
 .../log-file-entry.component.less               |  31 +
 .../log-file-entry.component.spec.ts            |  56 ++
 .../log-file-entry/log-file-entry.component.ts  |  51 ++
 .../logs-container.component.html               |  20 +-
 .../logs-container.component.spec.ts            |  10 +-
 .../logs-container/logs-container.component.ts  |  54 +-
 .../logs-list/logs-list.component.html          |  30 +-
 .../logs-list/logs-list.component.less          |  38 +-
 .../logs-list/logs-list.component.spec.ts       |   3 +
 .../components/logs-list/logs-list.component.ts |  34 +-
 .../main-container.component.html               |   7 +
 .../main-container.component.less               |   6 +-
 .../main-container.component.spec.ts            |  13 +-
 .../main-container/main-container.component.ts  |  32 +-
 .../src/app/components/main.less                |  20 +
 .../menu-button/menu-button.component.less      |  18 +-
 .../menu-button/menu-button.component.spec.ts   |   5 +-
 .../menu-button/menu-button.component.ts        |   7 +-
 .../src/app/components/mixins.less              | 200 +++++
 .../pagination-controls.component.less          |   2 +-
 .../pagination/pagination.component.less        |   4 +-
 .../search-box/search-box.component.less        |   2 +-
 .../search-box/search-box.component.ts          |   2 +-
 .../time-histogram.component.less               |  24 +-
 .../time-histogram/time-histogram.component.ts  |  94 ++-
 .../time-range-picker.component.less            |   2 +-
 .../timezone-picker.component.spec.ts           |   5 +-
 .../components/top-menu/top-menu.component.html |   2 +-
 .../components/top-menu/top-menu.component.less |   2 +-
 .../src/app/components/variables.less           |  93 +--
 .../src/app/models/app-settings.model.ts        |  27 -
 .../src/app/models/app-state.model.ts           |  35 -
 .../src/app/models/audit-log-field.model.ts     | 225 ------
 .../src/app/models/audit-log.model.ts           |  46 --
 .../src/app/models/bar-graph.model.ts           |  24 -
 .../src/app/models/common-entry.model.ts        |  22 -
 .../src/app/models/count.model.ts               |  22 -
 .../src/app/models/filter.model.ts              |  25 -
 .../src/app/models/graph.model.ts               |  23 -
 .../src/app/models/log-field.model.ts           |  27 -
 .../src/app/models/log.model.ts                 |  37 -
 .../src/app/models/node.model.ts                |  29 -
 .../src/app/models/service-log-field.model.ts   | 107 ---
 .../src/app/models/service-log.model.ts         |  27 -
 .../app/models/solr-collection-state.model.ts   |  23 -
 .../src/app/models/store.model.ts               | 169 ----
 .../src/app/models/user-config.model.ts         |  26 -
 .../services/component-actions.service.spec.ts  |   8 +-
 .../app/services/component-actions.service.ts   |  52 +-
 .../component-generator.service.spec.ts         |  10 +-
 .../app/services/component-generator.service.ts |  23 +-
 .../src/app/services/filtering.service.spec.ts  |  27 +
 .../src/app/services/filtering.service.ts       |  97 ++-
 .../src/app/services/http-client.service.ts     |  15 +-
 .../app/services/logs-container.service.spec.ts |   8 +-
 .../src/app/services/logs-container.service.ts  |  70 +-
 .../services/storage/app-settings.service.ts    |   4 +-
 .../app/services/storage/app-state.service.ts   |   4 +-
 .../storage/audit-logs-fields.service.ts        |   2 +-
 .../app/services/storage/audit-logs.service.ts  |   2 +-
 .../app/services/storage/clusters.service.ts    |   2 +-
 .../app/services/storage/components.service.ts  |   2 +-
 .../src/app/services/storage/filters.service.ts |   2 +-
 .../src/app/services/storage/graphs.service.ts  |   2 +-
 .../src/app/services/storage/hosts.service.ts   |   2 +-
 .../app/services/storage/reducers.service.ts    |   2 +
 .../storage/service-logs-fields.service.ts      |   2 +-
 .../service-logs-histogram-data.service.ts      |   2 +-
 .../storage/service-logs-truncated.service.ts   |  32 +
 .../services/storage/service-logs.service.ts    |   2 +-
 .../services/storage/user-configs.service.ts    |   2 +-
 .../src/assets/i18n/en.json                     |  14 +-
 .../ambari-logsearch-web/webpack.config.js      |  21 +-
 ambari-logsearch/ambari-logsearch-web/yarn.lock | 288 +++++--
 ambari-metrics/ambari-metrics-assembly/pom.xml  |   2 +
 ambari-metrics/ambari-metrics-common/pom.xml    |   6 +
 .../flume/FlumeTimelineMetricsSinkTest.java     |  27 +-
 .../ambari-metrics-timelineservice/pom.xml      |  10 +
 ambari-server/pom.xml                           |   2 +
 ambari-server/src/main/assemblies/server.xml    |  30 +-
 .../actionmanager/ExecutionCommandWrapper.java  |   3 +-
 .../ambari/server/actionmanager/Stage.java      |   3 +-
 .../ambari/server/agent/HeartbeatMonitor.java   |   3 +-
 .../ambari/server/agent/StatusCommand.java      |  16 +
 .../agent/stomp/dto/HostRepositories.java       |  19 +
 .../server/api/services/AmbariMetaInfo.java     |  60 +-
 .../server/checks/AbstractCheckDescriptor.java  |  81 +-
 .../ambari/server/checks/CheckDescription.java  |   8 +
 .../RequiredServicesInRepositoryCheck.java      |  91 +++
 .../ambari/server/checks/UpgradeCheck.java      |  11 +
 .../AmbariCustomCommandExecutionHelper.java     |  10 +
 .../controller/AmbariManagementController.java  |   6 +-
 .../AmbariManagementControllerImpl.java         |  69 +-
 .../controller/AmbariManagementHelper.java      |  28 +-
 .../server/controller/ControllerModule.java     |   4 +
 .../server/controller/KerberosHelper.java       |   9 +-
 .../server/controller/KerberosHelperImpl.java   | 252 +++---
 .../controller/ResourceProviderFactory.java     |  10 +-
 .../ServiceComponentHostResponse.java           |  15 +
 .../server/controller/StackVersionResponse.java |  29 -
 .../AbstractControllerResourceProvider.java     |   4 +
 .../internal/AlertTargetResourceProvider.java   |   3 +-
 .../BlueprintConfigurationProcessor.java        |  87 +-
 .../internal/ClientConfigResourceProvider.java  |   2 -
 .../ClusterStackVersionResourceProvider.java    |  31 +-
 .../internal/DefaultProviderModule.java         |   4 -
 .../internal/ExtensionLinkResourceProvider.java |  15 +
 .../internal/HostComponentResourceProvider.java |   4 +
 .../internal/ServiceResourceProvider.java       |   8 -
 .../internal/StackArtifactResourceProvider.java |  35 +-
 .../internal/UpgradeResourceProvider.java       |  77 +-
 .../internal/ViewInstanceResourceProvider.java  | 147 ++--
 .../utilities/RemovableIdentities.java          |   2 +-
 .../upgrade/HostVersionOutOfSyncListener.java   |  15 +
 .../ambari/server/metadata/ActionMetadata.java  |   1 +
 .../ambari/server/orm/DBAccessorImpl.java       |  16 +-
 .../orm/entities/RepositoryVersionEntity.java   |  39 +-
 .../orm/entities/ServiceDesiredStateEntity.java |   6 +-
 .../server/orm/entities/WidgetLayoutEntity.java |   6 +-
 .../AbstractPrepareKerberosServerAction.java    |  14 +
 .../kerberos/CreateKeytabFilesServerAction.java |   2 +-
 .../kerberos/CreatePrincipalsServerAction.java  |   2 +-
 .../kerberos/KerberosServerAction.java          |  71 +-
 .../PrepareDisableKerberosServerAction.java     |   3 +-
 .../PrepareEnableKerberosServerAction.java      |   6 +-
 .../PrepareKerberosIdentitiesServerAction.java  | 142 +++-
 .../upgrades/AbstractUpgradeServerAction.java   |   6 +-
 .../AtlasProxyUserConfigCalculation.java        |  62 ++
 .../upgrades/AutoSkipFailedSummaryAction.java   |  15 +-
 .../upgrades/ComponentVersionCheckAction.java   |   2 +-
 .../serveraction/upgrades/ConfigureAction.java  |  16 +-
 .../upgrades/FinalizeUpgradeAction.java         |  23 +-
 .../FixCapacitySchedulerOrderingPolicy.java     |  12 +-
 .../serveraction/upgrades/FixLzoCodecPath.java  |  10 +-
 .../upgrades/FixOozieAdminUsers.java            |  10 +-
 .../upgrades/FixYarnWebServiceUrl.java          |  11 +-
 .../upgrades/HBaseConfigCalculation.java        |  10 +-
 .../HBaseEnvMaxDirectMemorySizeAction.java      |  11 +-
 .../upgrades/HiveEnvClasspathAction.java        |  11 +-
 .../upgrades/HiveZKQuorumConfigAction.java      |  13 +-
 .../upgrades/KerberosKeytabsAction.java         |  13 +-
 .../upgrades/ManualStageAction.java             |   3 +-
 .../upgrades/OozieConfigCalculation.java        |  11 +-
 .../upgrades/PreconfigureKerberosAction.java    |  15 +-
 .../upgrades/RangerConfigCalculation.java       |  11 +-
 .../RangerKerberosConfigCalculation.java        |  11 +-
 .../upgrades/RangerKmsProxyConfig.java          |  11 +-
 .../RangerUsersyncConfigCalculation.java        |  11 +-
 .../upgrades/RangerWebAlertConfigAction.java    |   9 +-
 .../upgrades/SparkShufflePropertyConfig.java    |  11 +-
 .../upgrades/UpdateDesiredRepositoryAction.java |  10 +-
 .../upgrades/UpgradeUserKerberosDescriptor.java |   6 +-
 .../upgrades/YarnConfigCalculation.java         |  11 +-
 .../ambari/server/stack/ExtensionHelper.java    |  88 ++-
 .../ambari/server/stack/ServiceDirectory.java   |   3 +-
 .../ambari/server/stack/StackDirectory.java     |  74 +-
 .../ambari/server/stack/StackManager.java       |   5 +
 .../apache/ambari/server/stack/StackModule.java |  49 +-
 .../ambari/server/state/ConfigHelper.java       | 164 +++-
 .../org/apache/ambari/server/state/Host.java    |  17 +
 .../ambari/server/state/PropertyInfo.java       |  29 +
 .../ambari/server/state/RefreshCommand.java     |  52 ++
 .../state/RefreshCommandConfiguration.java      |  71 ++
 .../ambari/server/state/RepositoryType.java     |  18 +
 .../apache/ambari/server/state/ServiceInfo.java |   2 +-
 .../apache/ambari/server/state/StackInfo.java   |  49 +-
 .../ambari/server/state/UpgradeContext.java     |  79 +-
 .../ambari/server/state/host/HostImpl.java      |  43 +
 .../kerberos/AbstractKerberosDescriptor.java    |  25 +
 .../AbstractKerberosDescriptorContainer.java    |  16 +-
 .../state/kerberos/KerberosDescriptor.java      |   3 +-
 .../kerberos/KerberosIdentityDescriptor.java    | 160 ++++
 .../server/state/repository/StackPackage.java   |  69 ++
 .../state/repository/VersionDefinitionXml.java  |  87 ++
 .../ambari/server/state/stack/UpgradePack.java  |  36 +-
 .../state/stack/upgrade/ClusterGrouping.java    |  13 +
 .../state/stack/upgrade/ConfigureTask.java      |  16 +
 .../stack/upgrade/ServerSideActionTask.java     |  15 +
 .../state/stack/upgrade/TaskParameter.java      |  41 +
 .../svccomphost/ServiceComponentHostImpl.java   |  19 +-
 .../ServiceComponentHostSummary.java            | 123 ---
 .../server/topology/AsyncCallableService.java   | 110 ++-
 .../ambari/server/topology/TopologyManager.java |  26 +-
 .../topology/tasks/ConfigureClusterTask.java    | 124 +--
 .../server/upgrade/UpgradeCatalog260.java       |  25 +
 .../ambari_server/dbConfiguration_linux.py      |   9 +-
 .../python/ambari_server/resourceFilesKeeper.py |   7 +-
 .../python/ambari_server/serverConfiguration.py |   2 +-
 .../main/python/ambari_server/setupMpacks.py    |   2 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   8 +-
 .../1.6.1.2.2.0/configuration/accumulo-env.xml  |   6 +
 .../package/scripts/accumulo_script.py          |   1 +
 .../0.1.0/configuration/infra-solr-env.xml      |   6 +
 .../0.1.0/configuration/ams-env.xml             |   6 +
 .../0.1.0/package/scripts/metrics_collector.py  |   4 +-
 .../0.1.0/package/scripts/metrics_grafana.py    |   4 +-
 .../0.1.0/package/scripts/metrics_monitor.py    |   4 +-
 .../AMBARI_METRICS/0.1.0/service_advisor.py     | 787 +++++++++++++++++++
 .../ATLAS/0.1.0.2.3/configuration/atlas-env.xml |   6 +
 .../ATLAS/0.7.0.3.0/configuration/atlas-env.xml |   6 +
 .../ATLAS/0.7.0.3.0/service_advisor.py          |   5 +-
 .../DRUID/0.10.1/configuration/druid-broker.xml | 106 +++
 .../DRUID/0.10.1/configuration/druid-common.xml | 270 +++++++
 .../0.10.1/configuration/druid-coordinator.xml  |  43 +
 .../DRUID/0.10.1/configuration/druid-env.xml    | 248 ++++++
 .../0.10.1/configuration/druid-historical.xml   |  94 +++
 .../DRUID/0.10.1/configuration/druid-log4j.xml  |  84 ++
 .../0.10.1/configuration/druid-logrotate.xml    |  68 ++
 .../configuration/druid-middlemanager.xml       | 122 +++
 .../0.10.1/configuration/druid-overlord.xml     |  52 ++
 .../DRUID/0.10.1/configuration/druid-router.xml |  59 ++
 .../common-services/DRUID/0.10.1/metainfo.xml   | 223 ++++++
 .../DRUID/0.10.1/package/scripts/broker.py      |  28 +
 .../DRUID/0.10.1/package/scripts/coordinator.py |  28 +
 .../DRUID/0.10.1/package/scripts/druid.py       | 307 ++++++++
 .../DRUID/0.10.1/package/scripts/druid_node.py  | 115 +++
 .../DRUID/0.10.1/package/scripts/historical.py  |  28 +
 .../0.10.1/package/scripts/middlemanager.py     |  28 +
 .../DRUID/0.10.1/package/scripts/overlord.py    |  28 +
 .../DRUID/0.10.1/package/scripts/params.py      | 200 +++++
 .../DRUID/0.10.1/package/scripts/router.py      |  28 +
 .../0.10.1/package/scripts/service_check.py     |  44 ++
 .../0.10.1/package/scripts/status_params.py     |  24 +
 .../DRUID/0.10.1/quicklinks/quicklinks.json     |  37 +
 .../DRUID/0.10.1/role_command_order.json        |  17 +
 .../DRUID/0.10.1/themes/theme.json              | 120 +++
 .../DRUID/0.9.2/configuration/druid-broker.xml  | 100 ---
 .../DRUID/0.9.2/configuration/druid-common.xml  | 270 -------
 .../0.9.2/configuration/druid-coordinator.xml   |  43 -
 .../DRUID/0.9.2/configuration/druid-env.xml     | 242 ------
 .../0.9.2/configuration/druid-historical.xml    |  88 ---
 .../DRUID/0.9.2/configuration/druid-log4j.xml   |  84 --
 .../0.9.2/configuration/druid-logrotate.xml     |  68 --
 .../0.9.2/configuration/druid-middlemanager.xml | 122 ---
 .../0.9.2/configuration/druid-overlord.xml      |  52 --
 .../DRUID/0.9.2/configuration/druid-router.xml  |  59 --
 .../common-services/DRUID/0.9.2/metainfo.xml    | 223 ------
 .../DRUID/0.9.2/package/scripts/broker.py       |  28 -
 .../DRUID/0.9.2/package/scripts/coordinator.py  |  28 -
 .../DRUID/0.9.2/package/scripts/druid.py        | 307 --------
 .../DRUID/0.9.2/package/scripts/druid_node.py   | 114 ---
 .../DRUID/0.9.2/package/scripts/historical.py   |  28 -
 .../0.9.2/package/scripts/middlemanager.py      |  28 -
 .../DRUID/0.9.2/package/scripts/overlord.py     |  28 -
 .../DRUID/0.9.2/package/scripts/params.py       | 200 -----
 .../DRUID/0.9.2/package/scripts/router.py       |  28 -
 .../0.9.2/package/scripts/service_check.py      |  44 --
 .../0.9.2/package/scripts/status_params.py      |  24 -
 .../DRUID/0.9.2/quicklinks/quicklinks.json      |  37 -
 .../DRUID/0.9.2/role_command_order.json         |  17 -
 .../DRUID/0.9.2/themes/theme.json               | 120 ---
 .../0.5.0.2.1/configuration/falcon-env.xml      |   6 +
 .../FLUME/1.4.0.2.0/configuration/flume-env.xml |   6 +
 .../0.96.0.2.0/configuration/hbase-env.xml      |   6 +
 .../0.96.0.2.0/configuration/hbase-site.xml     |  17 +-
 .../HBASE/2.0.0.3.0/configuration/hbase-env.xml |   6 +
 .../HDFS/2.1.0.2.0/configuration/core-site.xml  |  12 +
 .../HDFS/2.1.0.2.0/configuration/hadoop-env.xml |  10 +
 .../HDFS/2.1.0.2.0/configuration/hdfs-site.xml  |   4 +
 .../HDFS/2.1.0.2.0/package/scripts/datanode.py  |  13 +-
 .../HDFS/2.1.0.2.0/package/scripts/hdfs.py      |  62 +-
 .../2.1.0.2.0/package/scripts/hdfs_client.py    |   5 +
 .../2.1.0.2.0/package/scripts/hdfs_namenode.py  |  21 +
 .../2.1.0.2.0/package/scripts/install_params.py |   6 -
 .../HDFS/2.1.0.2.0/package/scripts/namenode.py  |  21 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |   2 -
 .../HDFS/2.1.0.2.0/package/scripts/snamenode.py |  10 +
 .../HDFS/3.0.0.3.0/configuration/hadoop-env.xml |  10 +
 .../HDFS/3.0.0.3.0/configuration/hdfs-site.xml  |   7 +
 .../HDFS/3.0.0.3.0/package/scripts/datanode.py  |  13 +-
 .../HDFS/3.0.0.3.0/package/scripts/hdfs.py      |  62 +-
 .../3.0.0.3.0/package/scripts/hdfs_client.py    |   5 +
 .../3.0.0.3.0/package/scripts/hdfs_namenode.py  |  20 +
 .../3.0.0.3.0/package/scripts/install_params.py |   6 -
 .../HDFS/3.0.0.3.0/package/scripts/namenode.py  |  21 +-
 .../3.0.0.3.0/package/scripts/params_linux.py   |   2 -
 .../HDFS/3.0.0.3.0/package/scripts/snamenode.py |  10 +
 .../HIVE/0.12.0.2.0/configuration/hive-env.xml  |  12 +
 .../HIVE/0.12.0.2.0/configuration/hive-site.xml | 228 +-----
 .../HIVE/0.12.0.2.0/package/scripts/hive.py     |  34 +-
 .../package/scripts/hive_interactive.py         |  13 +
 .../package/scripts/hive_server_upgrade.py      |   5 -
 .../0.12.0.2.0/package/scripts/params_linux.py  |   9 +-
 .../0.12.0.2.0/package/scripts/status_params.py |   8 +-
 .../HIVE/0.12.0.2.0/package/scripts/webhcat.py  |   2 +-
 .../HIVE/2.1.0.3.0/configuration/hive-env.xml   |  12 +
 .../HIVE/2.1.0.3.0/configuration/hive-site.xml  |  48 ++
 .../HIVE/2.1.0.3.0/package/scripts/hive.py      |  32 +
 .../package/scripts/hive_interactive.py         |  13 +
 .../2.1.0.3.0/package/scripts/params_linux.py   |   4 +
 .../0.10.0.3.0/configuration/kafka-env.xml      |   6 +
 .../KAFKA/0.8.1/configuration/kafka-env.xml     |   6 +
 .../KNOX/0.5.0.2.2/configuration/knox-env.xml   |  10 +
 .../KNOX/0.5.0.3.0/configuration/knox-env.xml   |  10 +
 .../0.5.0/configuration/logsearch-env.xml       |   6 +
 .../1.0.0.2.3/configuration/mahout-env.xml      |   6 +
 .../OOZIE/4.0.0.2.0/configuration/oozie-env.xml |  10 +
 .../OOZIE/4.0.0.2.0/package/scripts/oozie.py    |   6 +-
 .../4.0.0.2.0/package/scripts/params_linux.py   |   3 -
 .../OOZIE/4.2.0.3.0/configuration/oozie-env.xml |  10 +
 .../OOZIE/4.2.0.3.0/package/scripts/oozie.py    |   5 +-
 .../4.2.0.3.0/package/scripts/params_linux.py   |   3 -
 .../RANGER/0.4.0/configuration/ranger-env.xml   |  10 +
 .../0.4.0/package/scripts/ranger_admin.py       |  36 +-
 .../0.4.0/package/scripts/ranger_tagsync.py     |  23 +-
 .../1.0.0.3.0/configuration/ranger-env.xml      |  10 +
 .../1.0.0.3.0/package/scripts/ranger_admin.py   |  24 +-
 .../1.0.0.3.0/package/scripts/ranger_tagsync.py |  23 +-
 .../0.5.0.2.3/configuration/kms-env.xml         |  10 +
 .../0.5.0.2.3/package/scripts/kms_server.py     |  21 +-
 .../1.0.0.3.0/configuration/kms-env.xml         |  10 +
 .../1.0.0.3.0/package/scripts/kms_server.py     |  19 +-
 .../RANGER_KMS/1.0.0.3.0/service_advisor.py     |   3 +-
 .../SPARK/1.2.1/configuration/spark-env.xml     |  10 +
 .../SPARK/2.2.0/configuration/livy-env.xml      |  10 +
 .../SPARK/2.2.0/configuration/spark-env.xml     |  10 +
 .../SPARK2/2.0.0/configuration/spark2-env.xml   |  10 +
 .../SQOOP/1.4.4.2.0/configuration/sqoop-env.xml |   6 +
 .../SQOOP/1.4.4.3.0/configuration/sqoop-env.xml |   6 +
 .../STORM/0.9.1/configuration/storm-env.xml     |   6 +
 .../package/scripts/supervisord_service.py      |   1 +
 .../STORM/1.0.1.3.0/configuration/storm-env.xml |   6 +
 .../package/scripts/supervisord_service.py      |   1 +
 .../0.15.0/configuration/superset-env.xml       |   6 +
 .../TEZ/0.4.0.2.1/configuration/tez-env.xml     |  10 +
 .../TEZ/0.9.0.3.0/configuration/tez-env.xml     |  10 +
 .../configuration-mapred/mapred-env.xml         |   6 +
 .../YARN/2.1.0.2.0/configuration/yarn-env.xml   |   6 +
 .../2.1.0.2.0/package/scripts/params_linux.py   |  34 +-
 .../2.1.0.2.0/package/scripts/status_params.py  |  52 +-
 .../configuration-mapred/mapred-env.xml         |   6 +
 .../YARN/3.0.0.3.0/configuration/yarn-env.xml   |   6 +
 .../YARN/3.0.0.3.0/configuration/yarn-site.xml  |   2 +-
 .../3.0.0.3.0/package/scripts/params_linux.py   |  53 +-
 .../3.0.0.3.0/package/scripts/status_params.py  |  52 +-
 .../YARN/3.0.0.3.0/service_advisor.py           |   3 +-
 .../0.6.0/configuration/zeppelin-env.xml        |  10 +
 .../0.7.0/configuration/zeppelin-env.xml        |  10 +
 .../ZEPPELIN/0.7.0/package/scripts/master.py    | 167 ++--
 .../ZEPPELIN/0.7.0/package/scripts/params.py    |  26 +-
 .../3.4.5/configuration/zookeeper-env.xml       |   6 +
 .../src/main/resources/configuration-schema.xsd |  14 +-
 .../custom_actions/scripts/install_packages.py  |  35 +-
 ambari-server/src/main/resources/kerberos.json  |  79 ++
 .../src/main/resources/properties.json          |   1 +
 .../main/resources/scripts/Ambaripreupload.py   |   4 +-
 .../scripts/post-user-creation-hook.sh          |   2 +-
 .../stack-hooks/after-INSTALL/scripts/hook.py   |  37 +
 .../stack-hooks/after-INSTALL/scripts/params.py | 108 +++
 .../scripts/shared_initialization.py            | 132 ++++
 .../before-ANY/files/changeToSecureUid.sh       |  64 ++
 .../stack-hooks/before-ANY/scripts/hook.py      |  36 +
 .../stack-hooks/before-ANY/scripts/params.py    | 254 ++++++
 .../before-ANY/scripts/shared_initialization.py | 273 +++++++
 .../stack-hooks/before-INSTALL/scripts/hook.py  |  37 +
 .../before-INSTALL/scripts/params.py            | 115 +++
 .../scripts/repo_initialization.py              |  73 ++
 .../scripts/shared_initialization.py            |  37 +
 .../stack-hooks/before-RESTART/scripts/hook.py  |  29 +
 .../before-START/files/checkForFormat.sh        |  65 ++
 .../before-START/files/fast-hdfs-resource.jar   | Bin 0 -> 28296600 bytes
 .../before-START/files/task-log4j.properties    | 134 ++++
 .../before-START/files/topology_script.py       |  66 ++
 .../before-START/scripts/custom_extensions.py   | 173 ++++
 .../stack-hooks/before-START/scripts/hook.py    |  43 +
 .../stack-hooks/before-START/scripts/params.py  | 371 +++++++++
 .../before-START/scripts/rack_awareness.py      |  48 ++
 .../scripts/shared_initialization.py            | 256 ++++++
 .../templates/commons-logging.properties.j2     |  43 +
 .../templates/exclude_hosts_list.j2             |  21 +
 .../templates/hadoop-metrics2.properties.j2     | 107 +++
 .../before-START/templates/health_check.j2      |  81 ++
 .../templates/include_hosts_list.j2             |  21 +
 .../templates/topology_mappings.data.j2         |  24 +
 .../HDP/2.0.6/configuration/cluster-env.xml     |  10 +
 .../2.0.6/hooks/after-INSTALL/scripts/hook.py   |  37 -
 .../2.0.6/hooks/after-INSTALL/scripts/params.py | 115 ---
 .../scripts/shared_initialization.py            | 132 ----
 .../hooks/before-ANY/files/changeToSecureUid.sh |  64 --
 .../HDP/2.0.6/hooks/before-ANY/scripts/hook.py  |  36 -
 .../2.0.6/hooks/before-ANY/scripts/params.py    | 277 -------
 .../before-ANY/scripts/shared_initialization.py | 281 -------
 .../2.0.6/hooks/before-INSTALL/scripts/hook.py  |  37 -
 .../hooks/before-INSTALL/scripts/params.py      | 115 ---
 .../scripts/repo_initialization.py              |  73 --
 .../scripts/shared_initialization.py            |  37 -
 .../2.0.6/hooks/before-RESTART/scripts/hook.py  |  29 -
 .../hooks/before-START/files/checkForFormat.sh  |  65 --
 .../before-START/files/fast-hdfs-resource.jar   | Bin 28296600 -> 0 bytes
 .../before-START/files/task-log4j.properties    | 134 ----
 .../hooks/before-START/files/topology_script.py |  66 --
 .../before-START/scripts/custom_extensions.py   | 173 ----
 .../2.0.6/hooks/before-START/scripts/hook.py    |  43 -
 .../2.0.6/hooks/before-START/scripts/params.py  | 378 ---------
 .../before-START/scripts/rack_awareness.py      |  48 --
 .../scripts/shared_initialization.py            | 256 ------
 .../templates/commons-logging.properties.j2     |  43 -
 .../templates/exclude_hosts_list.j2             |  21 -
 .../templates/hadoop-metrics2.properties.j2     | 107 ---
 .../before-START/templates/health_check.j2      |  81 --
 .../templates/include_hosts_list.j2             |  21 -
 .../templates/topology_mappings.data.j2         |  24 -
 .../resources/stacks/HDP/2.0.6/kerberos.json    |  79 --
 .../HDP/2.0.6/properties/stack_packages.json    |   8 +-
 .../stacks/HDP/2.0.6/role_command_order.json    |   2 +-
 .../stacks/HDP/2.0.6/services/stack_advisor.py  | 562 +------------
 .../resources/stacks/HDP/2.0.6/widgets.json     |  95 ---
 .../services/HBASE/configuration/hbase-env.xml  |   6 +
 .../stacks/HDP/2.2/services/stack_advisor.py    |   1 -
 .../services/ECS/package/scripts/ecs_client.py  |   2 +-
 .../services/ECS/package/scripts/params.py      |   2 +-
 .../services/HDFS/configuration/hdfs-site.xml   |   3 +
 .../stacks/HDP/2.3/upgrades/config-upgrade.xml  |   5 +
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml |   3 +
 .../stacks/HDP/2.3/upgrades/upgrade-2.6.xml     |   1 +
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  |   4 +
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml |   3 +
 .../stacks/HDP/2.4/upgrades/upgrade-2.6.xml     |   1 +
 .../services/ATLAS/configuration/atlas-env.xml  |   6 +
 .../HIVE/configuration/llap-daemon-log4j.xml    |   2 +-
 .../services/SPARK/configuration/livy-env.xml   |  10 +
 .../stacks/HDP/2.5/services/stack_advisor.py    |   6 +-
 .../stacks/HDP/2.5/upgrades/config-upgrade.xml  |  10 +
 .../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml |   7 +
 .../stacks/HDP/2.5/upgrades/upgrade-2.6.xml     |   4 +
 .../stacks/HDP/2.6/kerberos_preconfigure.json   |   9 +
 .../configuration/application-properties.xml    |  15 +
 .../stacks/HDP/2.6/services/DRUID/kerberos.json |  71 +-
 .../stacks/HDP/2.6/services/DRUID/metainfo.xml  |   5 +-
 .../configuration/hive-interactive-site.xml     | 225 ++++++
 .../HIVE/configuration/tez-interactive-site.xml |   6 +
 .../services/SPARK/configuration/livy-env.xml   |  10 +
 .../services/STORM/configuration/storm-site.xml |   6 +-
 .../HDP/2.6/services/SUPERSET/kerberos.json     |  10 +-
 .../services/YARN/configuration/yarn-site.xml   |  20 +-
 .../ZEPPELIN/configuration/zeppelin-env.xml     |  10 +
 .../stacks/HDP/2.6/services/stack_advisor.py    |  48 +-
 .../stacks/HDP/2.6/upgrades/config-upgrade.xml  |  32 +-
 .../HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml |  15 +-
 .../stacks/HDP/2.6/upgrades/upgrade-2.6.xml     |   5 +
 .../HDP/3.0/configuration/cluster-env.xml       |  10 +
 .../HDP/3.0/hooks/after-INSTALL/scripts/hook.py |  37 -
 .../3.0/hooks/after-INSTALL/scripts/params.py   | 109 ---
 .../scripts/shared_initialization.py            | 140 ----
 .../hooks/before-ANY/files/changeToSecureUid.sh |  53 --
 .../HDP/3.0/hooks/before-ANY/scripts/hook.py    |  36 -
 .../HDP/3.0/hooks/before-ANY/scripts/params.py  | 259 ------
 .../before-ANY/scripts/shared_initialization.py | 239 ------
 .../3.0/hooks/before-INSTALL/scripts/hook.py    |  37 -
 .../3.0/hooks/before-INSTALL/scripts/params.py  | 115 ---
 .../scripts/repo_initialization.py              |  76 --
 .../scripts/shared_initialization.py            |  37 -
 .../3.0/hooks/before-RESTART/scripts/hook.py    |  29 -
 .../hooks/before-START/files/checkForFormat.sh  |  65 --
 .../before-START/files/fast-hdfs-resource.jar   | Bin 28296600 -> 0 bytes
 .../before-START/files/task-log4j.properties    | 134 ----
 .../hooks/before-START/files/topology_script.py |  66 --
 .../HDP/3.0/hooks/before-START/scripts/hook.py  |  40 -
 .../3.0/hooks/before-START/scripts/params.py    | 364 ---------
 .../before-START/scripts/rack_awareness.py      |  47 --
 .../scripts/shared_initialization.py            | 249 ------
 .../templates/commons-logging.properties.j2     |  43 -
 .../templates/exclude_hosts_list.j2             |  21 -
 .../templates/hadoop-metrics2.properties.j2     | 107 ---
 .../before-START/templates/health_check.j2      |  81 --
 .../templates/include_hosts_list.j2             |  21 -
 .../templates/topology_mappings.data.j2         |  24 -
 .../main/resources/stacks/HDP/3.0/kerberos.json |  79 --
 .../stacks/HDP/3.0/role_command_order.json      |   2 +-
 .../main/resources/stacks/HDP/3.0/widgets.json  |  95 ---
 .../src/main/resources/upgrade-pack.xsd         |   9 +
 ambari-server/src/main/resources/widgets.json   |  95 +++
 .../server/api/services/AmbariMetaInfoTest.java |  44 +-
 .../checks/AbstractCheckDescriptorTest.java     |   3 +
 .../checks/AutoStartDisabledCheckTest.java      |  12 +-
 .../checks/ClientRetryPropertyCheckTest.java    |   2 +
 .../checks/ComponentExistsInRepoCheckTest.java  |   2 +
 .../checks/ComponentsInstallationCheckTest.java |   3 +
 .../checks/ConfigurationMergeCheckTest.java     |   2 +
 .../checks/DruidHighAvailabilityCheckTest.java  |   2 +
 .../checks/HiveMultipleMetastoreCheckTest.java  |   3 +
 .../checks/HiveNotRollingWarningTest.java       |   3 +
 .../server/checks/HostsHeartbeatCheckTest.java  |   2 +
 .../checks/HostsMasterMaintenanceCheckTest.java |   3 +
 .../checks/HostsRepositoryVersionCheckTest.java |   2 +
 .../server/checks/InstallPackagesCheckTest.java |   2 +
 .../server/checks/KafkaKerberosCheckTest.java   |   2 +
 ...duce2JobHistoryStatePreservingCheckTest.java |   2 +
 .../server/checks/RangerAuditDbCheckTest.java   |   3 +
 .../server/checks/RangerPasswordCheckTest.java  |   3 +
 .../server/checks/RangerSSLConfigCheckTest.java |   3 +
 .../RequiredServicesInRepositoryCheckTest.java  | 115 +++
 .../SecondaryNamenodeDeletedCheckTest.java      |   3 +
 .../ServicesMaintenanceModeCheckTest.java       |   2 +
 ...vicesMapReduceDistributedCacheCheckTest.java |   3 +
 ...rvicesNamenodeHighAvailabilityCheckTest.java |   3 +
 .../ServicesNamenodeTruncateCheckTest.java      |   2 +
 .../ServicesTezDistributedCacheCheckTest.java   |   3 +
 .../server/checks/ServicesUpCheckTest.java      |   2 +
 .../ServicesYarnWorkPreservingCheckTest.java    |   2 +
 .../server/checks/StormShutdownWarningTest.java |   3 +
 .../AmbariManagementControllerImplTest.java     |   6 +-
 .../AmbariManagementControllerTest.java         |   6 +
 .../server/controller/KerberosHelperTest.java   |   5 -
 .../BlueprintConfigurationProcessorTest.java    |  60 +-
 .../internal/UpgradeResourceProviderTest.java   |  46 +-
 .../utilities/KerberosIdentityCleanerTest.java  |   8 +-
 .../HostVersionOutOfSyncListenerTest.java       |  14 +-
 .../upgrade/StackVersionListenerTest.java       |   1 -
 .../apache/ambari/server/orm/OrmTestHelper.java |   2 +-
 ...AbstractPrepareKerberosServerActionTest.java |   5 +
 .../AtlasProxyUserConfigCalculationTest.java    | 127 +++
 .../FixCapacitySchedulerOrderingPolicyTest.java |   2 +-
 .../upgrades/FixOozieAdminUsersTest.java        |   2 +-
 .../upgrades/FixYarnWebServiceUrlTest.java      |   2 +-
 .../HBaseEnvMaxDirectMemorySizeActionTest.java  |   2 +-
 .../upgrades/HiveEnvClasspathActionTest.java    |   2 +-
 .../upgrades/HiveZKQuorumConfigActionTest.java  |   2 +-
 .../upgrades/KerberosKeytabsActionTest.java     |  17 +
 .../upgrades/RangerConfigCalculationTest.java   |   2 +-
 .../RangerKerberosConfigCalculationTest.java    |   2 +-
 .../upgrades/RangerKmsProxyConfigTest.java      |   2 +-
 .../RangerUsersyncConfigCalculationTest.java    |   2 +-
 .../RangerWebAlertConfigActionTest.java         |   6 +-
 .../SparkShufflePropertyConfigTest.java         |   2 +-
 .../UpgradeUserKerberosDescriptorTest.java      |   4 +-
 .../server/stack/StackManagerExtensionTest.java |  49 +-
 .../ambari/server/stack/StackManagerTest.java   |   8 -
 .../ambari/server/state/CheckHelperTest.java    |   7 +
 .../ambari/server/state/ConfigHelperTest.java   |  76 +-
 .../ambari/server/state/PropertyInfoTest.java   |  20 +
 .../ambari/server/state/UpgradeContextTest.java |  96 ++-
 .../state/kerberos/KerberosDescriptorTest.java  | 198 ++++-
 .../KerberosDescriptorUpdateHelperTest.java     |   1 +
 .../kerberos/VariableReplacementHelperTest.java |  35 +-
 .../state/stack/ConfigUpgradeValidityTest.java  |  10 +
 .../svccomphost/ServiceComponentHostTest.java   | 102 ++-
 .../topology/AsyncCallableServiceTest.java      |  89 +--
 .../ClusterDeployWithStartOnlyTest.java         |   6 +
 ...InstallWithoutStartOnComponentLevelTest.java |   6 +
 .../ClusterInstallWithoutStartTest.java         |   6 +
 .../topology/ConfigureClusterTaskTest.java      |  64 +-
 .../server/topology/TopologyManagerTest.java    |  14 +-
 .../src/test/python/TestAmbariServer.py         |   4 +-
 ambari-server/src/test/python/TestMpacks.py     |  12 +-
 .../src/test/python/TestResourceFilesKeeper.py  |   1 +
 .../src/test/python/TestUpgradeSummary.py       |   6 +-
 .../AMBARI_METRICS/test_service_advisor.py      | 596 ++++++++++++++
 .../HIVE/test_jdbc_driver_config.py             |  18 +-
 .../RANGER/test_db_flavor_config.py             |  17 +-
 .../RANGER_KMS/test_db_flavor_config.py         |  17 +-
 .../SQOOP/test_jdbc_driver_config.py            |  16 +-
 .../configs/ranger_admin_default.json           |  55 ++
 .../custom_actions/TestInstallPackages.py       |  50 +-
 .../stacks/2.0.6/HBASE/test_hbase_client.py     |   1 -
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |   8 +-
 .../2.0.6/HBASE/test_phoenix_queryserver.py     |   7 +
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |  55 +-
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |  38 +-
 .../stacks/2.0.6/HIVE/test_hive_metastore.py    |  42 +-
 .../stacks/2.0.6/HIVE/test_hive_server.py       | 148 ++--
 .../2.0.6/HIVE/test_hive_service_check.py       |   4 +-
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    |  41 +-
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |   9 +
 .../2.0.6/OOZIE/test_oozie_service_check.py     |   5 +-
 .../stacks/2.0.6/YARN/test_historyserver.py     |   5 +-
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |   1 +
 .../stacks/2.0.6/common/test_stack_advisor.py   | 576 --------------
 .../python/stacks/2.0.6/configs/default.json    |   1 +
 .../hooks/after-INSTALL/test_after_install.py   |  88 +--
 .../2.0.6/hooks/before-ANY/test_before_any.py   |  24 +-
 .../hooks/before-INSTALL/test_before_install.py |  14 +-
 .../hooks/before-START/test_before_start.py     |  21 +-
 .../stacks/2.1/FALCON/test_falcon_server.py     |  26 +-
 .../stacks/2.1/HIVE/test_hive_metastore.py      |  54 +-
 .../stacks/2.2/PIG/test_pig_service_check.py    |  13 +
 .../stacks/2.2/SPARK/test_job_history_server.py |  18 +-
 .../stacks/2.2/common/test_conf_select.py       |  13 +-
 .../stacks/2.2/common/test_stack_advisor.py     | 511 ------------
 .../2.2/common/test_stack_advisor_perf.py       |  66 +-
 .../stacks/2.3/MAHOUT/test_mahout_client.py     |   2 +-
 .../2.3/MAHOUT/test_mahout_service_check.py     |  28 +-
 .../2.3/SPARK/test_spark_thrift_server.py       |   8 +-
 .../stacks/2.3/common/test_stack_advisor.py     |   2 +-
 .../stacks/2.5/HIVE/test_hive_server_int.py     |  16 +
 .../stacks/2.5/RANGER_KMS/test_kms_server.py    |  24 +-
 .../python/stacks/2.5/SPARK/test_spark_livy.py  |  16 +-
 .../stacks/2.5/ZEPPELIN/test_zeppelin_060.py    |  20 +-
 .../stacks/2.5/common/test_stack_advisor.py     |   5 +-
 .../stacks/2.5/configs/ranger-kms-secured.json  |   6 +-
 .../test/python/stacks/2.6/DRUID/test_druid.py  |  22 +-
 .../stacks/2.6/SPARK2/test_spark_livy2.py       |  16 +-
 .../stacks/2.6/ZEPPELIN/test_zeppelin_070.py    | 218 ++---
 .../stacks/2.6/common/test_stack_advisor.py     | 214 ++++-
 .../src/test/python/stacks/utils/RMFTestCase.py |  47 +-
 .../resources/extensions/EXT/0.2/metainfo.xml   |   4 +-
 .../resources/extensions/EXT/0.3/metainfo.xml   |   2 +-
 .../resources/extensions/EXT/0.4/metainfo.xml   |  32 +
 .../EXT/0.4/services/OOZIE2/metainfo.xml        | 118 +++
 .../services/OOZIE2/themes/broken_theme.json    |   3 +
 ambari-server/src/test/resources/kerberos.json  |  42 +
 .../services/HDFS/configuration/hdfs-site.xml   |   8 +
 .../resources/stacks/HDP/2.0.8/kerberos.json    |  42 -
 .../resources/stacks/OTHER/1.0/widgets.json     |  95 ---
 .../stacks_with_extensions/HDP/0.4/metainfo.xml |  22 +
 .../HDP/0.4/repos/repoinfo.xml                  |  63 ++
 .../HDP/0.4/services/HBASE/metainfo.xml         |  26 +
 .../0.4/services/HDFS/configuration/global.xml  | 145 ++++
 .../services/HDFS/configuration/hadoop-env.xml  | 223 ++++++
 .../services/HDFS/configuration/hbase-site.xml  | 137 ++++
 .../services/HDFS/configuration/hdfs-log4j.xml  | 199 +++++
 .../services/HDFS/configuration/hdfs-site.xml   | 396 ++++++++++
 .../HDP/0.4/services/HDFS/metainfo.xml          |  30 +
 .../0.4/services/HDFS/package/dummy-script.py   |  20 +
 .../HDP/0.4/services/HIVE/metainfo.xml          |  26 +
 .../HDP/0.4/services/MAPREDUCE/metainfo.xml     |  23 +
 .../HDP/0.4/services/ZOOKEEPER/metainfo.xml     |  26 +
 ambari-server/src/test/resources/widgets.json   |  95 +++
 ambari-utility/pom.xml                          |   6 +
 ambari-web/app/app.js                           |  24 +-
 ambari-web/app/assets/test/tests.js             |   2 +
 ambari-web/app/controllers.js                   |   1 +
 ambari-web/app/controllers/installer.js         |   8 +-
 .../journalNode/progress_controller.js          |   4 +-
 .../journalNode/step4_controller.js             |   6 +-
 .../nameNode/step5_controller.js                |   6 +-
 .../highAvailability/progress_controller.js     |   4 +-
 .../main/admin/kerberos/step2_controller.js     |   7 +-
 .../main/admin/stack_and_upgrade_controller.js  |   8 +-
 ambari-web/app/controllers/main/host/details.js |   2 -
 .../app/controllers/main/service/info/metric.js | 468 +++++++++++
 .../controllers/main/service/info/summary.js    | 450 +----------
 .../main/service/reassign/step3_controller.js   |  18 +-
 .../main/service/reassign/step4_controller.js   |   2 -
 .../service/widgets/create/wizard_controller.js |   2 +-
 .../app/controllers/wizard/step6_controller.js  |  15 +-
 .../app/controllers/wizard/step8_controller.js  |  15 +-
 .../app/mappers/repository_version_mapper.js    |   2 +-
 ambari-web/app/messages.js                      |  10 +-
 .../app/mixins/common/configs/configs_saver.js  |  26 +-
 .../main/service/configs/config_overridable.js  |   1 -
 .../mixins/wizard/assign_master_components.js   |   2 +-
 ambari-web/app/models/alerts/alert_group.js     |   2 +-
 .../models/configs/service_config_version.js    |   9 +-
 ambari-web/app/models/host_stack_version.js     |   4 +-
 .../models/stack_version/repository_version.js  |   2 +-
 ambari-web/app/routes/add_kerberos_routes.js    |  22 +-
 ambari-web/app/routes/installer.js              |   3 +-
 ambari-web/app/routes/main.js                   |   1 +
 ambari-web/app/styles/alerts.less               |  14 +-
 ambari-web/app/styles/application.less          |  15 +-
 ambari-web/app/styles/bootstrap_overrides.less  |   4 +
 ambari-web/app/styles/common.less               |   3 +-
 ambari-web/app/styles/dashboard.less            |  28 +-
 .../app/styles/enhanced_service_dashboard.less  |  26 +-
 ambari-web/app/styles/modal_popups.less         |   2 +-
 ambari-web/app/styles/stack_versions.less       |  13 +-
 .../app/styles/theme/bootstrap-ambari.css       |  78 +-
 ambari-web/app/styles/top-nav.less              |  27 +-
 ambari-web/app/styles/wizard.less               |  33 +-
 ambari-web/app/templates/application.hbs        |  39 +-
 ambari-web/app/templates/common/breadcrumbs.hbs |   4 +-
 .../common/configs/config_history_flow.hbs      |   8 +-
 .../common/configs/service_version_box.hbs      |   8 +-
 .../templates/common/host_progress_popup.hbs    |   6 +-
 .../modal_popups/widget_browser_popup.hbs       |   6 +-
 ambari-web/app/templates/experimental.hbs       |   6 +-
 ambari-web/app/templates/installer.hbs          |   2 +-
 .../stack_upgrade/stack_upgrade_wizard.hbs      |   2 +-
 .../main/admin/stack_upgrade/versions.hbs       |   6 +-
 ambari-web/app/templates/main/alerts.hbs        |   3 +-
 .../main/alerts/alert_definitions_actions.hbs   |   2 +-
 ambari-web/app/templates/main/dashboard.hbs     |   2 +-
 .../templates/main/dashboard/config_history.hbs |   2 +-
 ambari-web/app/templates/main/host.hbs          |   6 +-
 .../templates/main/host/bulk_operation_menu.hbs | 134 ++--
 .../main/host/details/host_component.hbs        | 176 +++--
 .../app/templates/main/host/stack_versions.hbs  |   4 +-
 ambari-web/app/templates/main/host/summary.hbs  | 120 +--
 .../app/templates/main/service/info/metrics.hbs | 104 +++
 .../main/service/info/service_alert_popup.hbs   |   6 +-
 .../app/templates/main/service/info/summary.hbs | 105 +--
 .../service/info/summary/master_components.hbs  |   4 -
 ambari-web/app/templates/main/service/item.hbs  |   7 +-
 .../templates/main/service/reassign/step3.hbs   |   6 +-
 .../templates/main/service/services/hdfs.hbs    |  38 +-
 ambari-web/app/templates/wizard/step6.hbs       |   2 +-
 ambari-web/app/templates/wizard/step9.hbs       |  12 +-
 ambari-web/app/utils/ajax/ajax.js               |   2 -
 ambari-web/app/utils/helper.js                  |  23 +
 ambari-web/app/views.js                         |   1 +
 .../stack_upgrade/upgrade_version_box_view.js   |  74 +-
 .../upgrade_version_column_view.js              |   2 +-
 .../admin/stack_upgrade/upgrade_wizard_view.js  |  26 +-
 .../views/main/host/hosts_table_menu_view.js    |   4 +-
 .../app/views/main/host/stack_versions_view.js  |   2 +-
 ambari-web/app/views/main/service/info/menu.js  |   7 +
 .../app/views/main/service/info/metrics_view.js | 290 +++++++
 .../app/views/main/service/info/summary.js      | 323 ++------
 ambari-web/app/views/main/service/item.js       |   8 +-
 .../app/views/main/service/services/hdfs.js     |   2 -
 ambari-web/test/controllers/installer_test.js   |  37 +-
 .../journalNode/progress_controller_test.js     |   2 -
 .../progress_controller_test.js                 |   3 -
 .../admin/kerberos/step2_controller_test.js     |   9 +-
 .../main/service/info/metric_test.js            | 110 +++
 .../main/service/info/summary_test.js           |  76 --
 .../mixins/common/configs/configs_saver_test.js |  28 +-
 .../wizard/assign_master_components_test.js     |   2 +-
 .../test/models/alerts/alert_group_test.js      |   5 +-
 .../stack_version/repository_version_test.js    |  12 +-
 .../upgrade_version_box_view_test.js            | 149 ++--
 .../upgrade_version_column_view_test.js         |   6 +
 .../stack_upgrade/upgrade_wizard_view_test.js   |  38 +
 .../main/service/info/metrics_view_test.js      | 334 ++++++++
 .../views/main/service/info/summary_test.js     | 281 +------
 ambari-web/test/views/main/service/item_test.js |   6 +-
 .../src/main/assemblies/hdf-ambari-mpack.xml    |   1 +
 .../HIVE/package/scripts/hive_client.py         |   2 -
 .../HIVE/package/scripts/hive_metastore.py      |   1 -
 .../HIVE/package/scripts/hive_server.py         |   2 +-
 .../package/scripts/hive_server_interactive.py  |   1 -
 .../HIVE/package/scripts/webhcat_server.py      |   2 -
 .../scripts/application_timeline_server.py      |   1 -
 .../YARN/package/scripts/historyserver.py       |   1 -
 .../YARN/package/scripts/mapreduce2_client.py   |   2 -
 .../YARN/package/scripts/nodemanager.py         |   1 -
 .../YARN/package/scripts/resourcemanager.py     |   1 -
 .../YARN/package/scripts/yarn_client.py         |   1 -
 .../ui/hive-web/app/adapters/application.js     |  18 +
 .../app/controllers/visualization-ui.js         |  10 +-
 .../ui/hive-web/app/utils/constants.js          |   1 +
 .../view/hive20/resources/browser/DDLProxy.java | 154 ++--
 .../savedQueries/SavedQueryResourceManager.java |  17 +-
 .../savedQueries/SavedQueryService.java         |  46 +-
 .../hive20/resources/uploads/UploadService.java |  12 +-
 .../uploads/query/InsertFromQueryInput.java     |  13 +-
 .../resources/ui/app/components/job-item.js     |  49 +-
 .../resources/ui/app/components/jobs-browser.js |   3 +
 .../ui/app/components/query-result-table.js     |   2 +-
 .../databases/database/tables/upload-table.js   |  12 +
 .../src/main/resources/ui/app/routes/jobs.js    |  11 +
 .../main/resources/ui/app/routes/queries/new.js |   2 +
 .../resources/ui/app/routes/queries/query.js    | 237 ++++--
 .../src/main/resources/ui/app/services/jobs.js  |  31 +-
 .../resources/ui/app/services/saved-queries.js  |  21 +
 .../src/main/resources/ui/app/styles/app.scss   |   8 +
 .../ui/app/templates/components/job-item.hbs    |   2 +-
 .../app/templates/components/jobs-browser.hbs   |   2 +-
 .../main/resources/ui/app/templates/jobs.hbs    |   1 +
 .../hive20/src/main/resources/ui/yarn.lock      |   2 +-
 .../apache/ambari/view/utils/hdfs/HdfsApi.java  |   2 +-
 docs/pom.xml                                    |  61 +-
 855 files changed, 21145 insertions(+), 16624 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-agent/pom.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-agent/src/main/python/ambari_agent/FileCache.py
----------------------------------------------------------------------
diff --cc ambari-agent/src/main/python/ambari_agent/FileCache.py
index 17235bf,28912d1..6994044
--- a/ambari-agent/src/main/python/ambari_agent/FileCache.py
+++ b/ambari-agent/src/main/python/ambari_agent/FileCache.py
@@@ -82,11 -83,10 +82,10 @@@ class FileCache()
      Returns a base directory for hooks
      """
      try:
-       hooks_subpath = command['clusterLevelParams']['hooks_folder']
 -      hooks_path = command['commandParams']['hooks_folder']
++      hooks_path = command['clusterLevelParams']['hooks_folder']
      except KeyError:
        return None
-     subpath = os.path.join(self.STACKS_CACHE_DIRECTORY, hooks_subpath)
-     return self.provide_directory(self.cache_dir, subpath,
+     return self.provide_directory(self.cache_dir, hooks_path,
                                    server_url_prefix)
  
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-agent/src/main/python/ambari_agent/main.py
----------------------------------------------------------------------
diff --cc ambari-agent/src/main/python/ambari_agent/main.py
index de23ff6,5fcd051..3948290
--- a/ambari-agent/src/main/python/ambari_agent/main.py
+++ b/ambari-agent/src/main/python/ambari_agent/main.py
@@@ -86,9 -89,9 +86,8 @@@ import tim
  import locale
  import platform
  import ConfigParser
- import ProcessHelper
  import resource
  from logging.handlers import SysLogHandler
 -from Controller import Controller
  import AmbariConfig
  from NetUtil import NetUtil
  from PingPortListener import PingPortListener
@@@ -298,10 -290,10 +300,10 @@@ def stop_agent()
    pid = -1
    runner = shellRunner()
    try:
-     with open(ProcessHelper.pidfile, 'r') as f:
+     with open(agent_pidfile, 'r') as f:
        pid = f.read()
      pid = int(pid)
 -    
 +
      runner.run([AMBARI_SUDO_BINARY, 'kill', '-15', str(pid)])
      for i in range(GRACEFUL_STOP_TRIES):
        result = runner.run([AMBARI_SUDO_BINARY, 'kill', '-0', str(pid)])

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --cc ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index 26fbaaf,c89e767..8a53502
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@@ -374,62 -216,82 +216,82 @@@ def convert_conf_directories_to_symlink
      if not os.path.exists(d):
        need_dirs.append(d)
  
+   # log that we'll actually be creating some directories soon
    if len(need_dirs) > 0:
-     create(stack_name, package, version)
+     Logger.info("Package {0} will have the following new configuration directories created: {1}".format(
+       package, ", ".join(dry_run_directory)))
  
-     # find the matching definition and back it up (not the most efficient way) ONLY if there is more than one directory
-     if len(dirs) > 1:
-       for need_dir in need_dirs:
-         for dir_def in dirs:
-           if 'prefix' in dir_def and need_dir.startswith(dir_def['prefix']):
-             old_conf = dir_def['conf_dir']
-             versioned_conf = need_dir
-             Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
-               only_if = format("ls -d {old_conf}/*"))
-     elif 1 == len(dirs) and 1 == len(need_dirs):
-       old_conf = dirs[0]['conf_dir']
-       versioned_conf = need_dirs[0]
-       Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
-         only_if = format("ls -d {old_conf}/*"))
+   # Create the versioned /etc/[component]/[version]/0 folder (using create-conf-dir) and then
+   # set it for the installed component:
+   # - Creates /etc/<component>/<version>/0
+   # - Links <stack-root>/<version>/<component>/conf -> /etc/<component>/<version>/0
+   select(stack_name, package, version, ignore_errors = True)
  
+   # check every existing link to see if it's a link and if it's pointed to the right spot
+   for directory_struct in dirs:
+     try:
+       # check if conf is a link already
+       old_conf = directory_struct['conf_dir']
+       current_dir = directory_struct['current_dir']
+       if os.path.islink(old_conf):
+         # it's already a link; make sure it's a link to where we want it
+         if os.readlink(old_conf) != current_dir:
+           # the link isn't to the right spot; re-link it
+           Logger.info("Re-linking symlink {0} to {1}".format(old_conf, current_dir))
+           Link(old_conf, action = "delete")
+           Link(old_conf, to = current_dir)
+         else:
+           Logger.info("{0} is already linked to {1}".format(old_conf, current_dir))
+       elif os.path.isdir(old_conf):
+         # the /etc/<component>/conf directory is not a link, so turn it into one
+         Logger.info("{0} is a directory - it must be converted into a symlink".format(old_conf))
  
-   # <stack-root>/current/[component] is already set to to the correct version, e.g., <stack-root>/[version]/[component]
+         backup_dir = _get_backup_conf_directory(old_conf)
+         Logger.info("Backing up {0} to {1} if destination doesn't exist already.".format(old_conf, backup_dir))
+         Execute(("cp", "-R", "-p", old_conf, backup_dir),
+           not_if = format("test -e {backup_dir}"), sudo = True)
  
-   select(stack_name, package, version, ignore_errors = True)
+         # delete the old /etc/<component>/conf directory now that it's been backed up
+         Directory(old_conf, action = "delete")
  
-   # Symlink /etc/[component]/conf to /etc/[component]/conf.backup
-   try:
-     # No more references to /etc/[component]/conf
-     for dir_def in dirs:
-       # E.g., /etc/[component]/conf
-       new_symlink = dir_def['conf_dir']
- 
-       # Delete the existing directory/link so that linking will work
-       if not os.path.islink(new_symlink):
-         Directory(new_symlink, action = "delete")
-       else:
-         Link(new_symlink, action = "delete")
- 
-       old_conf = dir_def['conf_dir']
-       backup_dir = _get_backup_conf_directory(old_conf)
-       # link /etc/[component]/conf -> /etc/[component]/conf.backup
-       # or
-       # link /etc/[component]/conf -> <stack-root>/current/[component]-client/conf
-       if link_to == DIRECTORY_TYPE_BACKUP:
-         Link(new_symlink, to=backup_dir)
+         # link /etc/[component]/conf -> <stack-root>/current/[component]-client/conf
+         Link(old_conf, to = current_dir)
        else:
-         Link(new_symlink, to=dir_def['current_dir'])
- 
-         #HACK
+         # missing entirely
+         # /etc/<component>/conf -> <stack-root>/current/<component>/conf
          if package in ["atlas", ]:
-           Logger.info("Seeding the new conf symlink {0} from the old backup directory {1} in case any "
-                       "unmanaged artifacts are needed.".format(new_symlink, backup_dir))
-           # If /etc/[component]/conf.backup exists, then copy any artifacts not managed by Ambari to the new symlink target
-           # Be careful not to clobber any existing files.
-           Execute(as_sudo(["cp", "-R", "--no-clobber", os.path.join(backup_dir, "*"), new_symlink], auto_escape=False),
-                   only_if=format("test -e {new_symlink}"))
-   except Exception, e:
-     Logger.warning("Could not change symlink for package {0} to point to {1} directory. Error: {2}".format(package, link_to, e))
+           # HACK for Atlas
+           '''
+           In the case of Atlas, the Hive RPM installs /usr/$stack/$version/atlas with some partial packages that
+           contain Hive hooks, while the Atlas RPM is responsible for installing the full content.
 -    
++
+           If the user does not have Atlas currently installed on their stack, then /usr/$stack/current/atlas-client
+           will be a broken symlink, and we should not create the
+           symlink /etc/atlas/conf -> /usr/$stack/current/atlas-client/conf .
+           If we mistakenly create this symlink, then when the user performs an EU/RU and then adds Atlas service
+           then the Atlas RPM will not be able to copy its artifacts into /etc/atlas/conf directory and therefore
+           prevent Ambari from by copying those unmanaged contents into /etc/atlas/$version/0
+           '''
+           component_list = default("/localComponents", [])
+           if "ATLAS_SERVER" in component_list or "ATLAS_CLIENT" in component_list:
+             Logger.info("Atlas is installed on this host.")
+             parent_dir = os.path.dirname(current_dir)
+             if os.path.exists(parent_dir):
+               Link(old_conf, to = current_dir)
+             else:
+               Logger.info(
+                 "Will not create symlink from {0} to {1} because the destination's parent dir does not exist.".format(
+                   old_conf, current_dir))
+           else:
+             Logger.info(
+             "Will not create symlink from {0} to {1} because Atlas is not installed on this host.".format(
+               old_conf, current_dir))
+         else:
+           # Normal path for other packages
+           Link(old_conf, to = current_dir)
+ 
+     except Exception, e:
+       Logger.warning("Could not change symlink for package {0} to point to current directory. Error: {1}".format(package, e))
  
  
  def _seed_new_configuration_directories(package, created_directories):

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --cc ambari-common/src/main/python/resource_management/libraries/script/script.py
index e507b0a,12e6f98..97e7e50
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@@ -744,6 -771,19 +771,19 @@@ class Script(object)
      """
      self.install_packages(env)
  
+   def load_available_packages(self):
+     if self.available_packages_in_repos:
+       return self.available_packages_in_repos
+ 
+ 
 -    pkg_provider = get_provider("Package")   
++    pkg_provider = get_provider("Package")
+     try:
+       self.available_packages_in_repos = pkg_provider.get_available_packages_in_repos(self.get_config()['repositoryFile']['repositories'])
+     except Exception as err:
+       Logger.exception("Unable to load available packages")
+       self.available_packages_in_repos = []
+ 
+ 
    def install_packages(self, env):
      """
      List of packages that are required by service is received from the server
@@@ -763,15 -803,9 +803,9 @@@
          return
        pass
      try:
 -      package_list_str = config['hostLevelParams']['package_list']
 -      agent_stack_retry_on_unavailability = bool(config['hostLevelParams']['agent_stack_retry_on_unavailability'])
 -      agent_stack_retry_count = int(config['hostLevelParams']['agent_stack_retry_count'])
 +      package_list_str = config['commandParams']['package_list']
 +      agent_stack_retry_on_unavailability = bool(config['ambariLevelParams']['agent_stack_retry_on_unavailability'])
 +      agent_stack_retry_count = int(config['ambariLevelParams']['agent_stack_retry_count'])
-       pkg_provider = get_provider("Package")
-       try:
-         available_packages_in_repos = pkg_provider.get_available_packages_in_repos(config['repositoryFile']['repositories'])
-       except Exception as err:
-         Logger.exception("Unable to load available packages")
-         available_packages_in_repos = []
        if isinstance(package_list_str, basestring) and len(package_list_str) > 0:
          package_list = json.loads(package_list_str)
          for package in package_list:

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py
----------------------------------------------------------------------
diff --cc ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py
index e0356bb,2675bd9..e0356bb
mode 100644,100755..100755
--- a/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py
+++ b/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/pom.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
index 79ca5ba,ff13d0b..1bf67c3
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
@@@ -20,7 -20,9 +20,8 @@@ package org.apache.ambari.server.action
  import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
  import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER;
  import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.VERSION;
+ import static org.apache.ambari.server.stack.StackManager.DEFAULT_HOOKS_FOLDER;
  
 -import java.util.HashMap;
  import java.util.Map;
  import java.util.TreeMap;
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java
index 76c2019,9f8a095..f6bfdcf
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java
@@@ -38,11 -37,12 +38,14 @@@ public class StatusCommand extends Agen
    private String clusterName;
  
    @SerializedName("serviceName")
 +  @com.fasterxml.jackson.annotation.JsonProperty("serviceName")
    private String serviceName;
  
+   @SerializedName("role")
+   private String role;
+ 
    @SerializedName("componentName")
 +  @com.fasterxml.jackson.annotation.JsonProperty("componentName")
    private String componentName;
  
    @SerializedName("configurations")

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/agent/stomp/dto/HostRepositories.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/agent/stomp/dto/HostRepositories.java
index 8d0cfb0,0000000..a076c66
mode 100644,000000..100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/stomp/dto/HostRepositories.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/stomp/dto/HostRepositories.java
@@@ -1,39 -1,0 +1,58 @@@
++/**
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ * <p/>
++ * http://www.apache.org/licenses/LICENSE-2.0
++ * <p/>
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++
 +package org.apache.ambari.server.agent.stomp.dto;
 +
 +import java.util.Map;
 +
 +import org.apache.ambari.server.agent.CommandRepository;
 +
 +import com.fasterxml.jackson.annotation.JsonInclude;
 +import com.fasterxml.jackson.annotation.JsonProperty;
 +
 +@JsonInclude(JsonInclude.Include.NON_EMPTY)
 +public class HostRepositories {
 +
 +  @JsonProperty("commandRepos")
 +  private Map<Long, CommandRepository> repositories;
 +
 +  @JsonProperty("componentRepos")
 +  private Map<String, Long> componentRepos;
 +
 +  public HostRepositories(Map<Long, CommandRepository> repositories, Map<String, Long> componentRepos) {
 +    this.repositories = repositories;
 +    this.componentRepos = componentRepos;
 +  }
 +
 +  public Map<Long, CommandRepository> getRepositories() {
 +    return repositories;
 +  }
 +
 +  public void setRepositories(Map<Long, CommandRepository> repositories) {
 +    this.repositories = repositories;
 +  }
 +
 +  public Map<String, Long> getComponentRepos() {
 +    return componentRepos;
 +  }
 +
 +  public void setComponentRepos(Map<String, Long> componentRepos) {
 +    this.componentRepos = componentRepos;
 +  }
 +}

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
index 407238d,46ee65a..49679b4
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
@@@ -1663,21 -1448,7 +1675,25 @@@ public class AmbariMetaInfo 
      return null;
    }
  
 +  /**
 +   * Class that is used to update base urls.  There are two implementations of this - when we no
 +   * longer are sure the deprecated repo info can be removed, so too can this class.
 +   */
 +  @Experimental(feature= ExperimentalFeature.PATCH_UPGRADES)
 +  abstract static class BaseUrlUpdater<T> implements Function<RepositoryVersionEntity, T> {
 +    private T m_default;
 +
 +    private BaseUrlUpdater(T defaultValue) {
 +      m_default = defaultValue;
 +    }
 +
 +    private T getDefault() {
 +      return m_default;
 +    }
 +
 +  }
++
+   public File getCommonWidgetsDescriptorFile() {
+     return commonWidgetsDescriptorFile;
+   }
  }

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index 5ffdf26,e12477e..7a98b7c
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@@ -84,6 -89,8 +84,7 @@@ import org.apache.ambari.server.state.H
  import org.apache.ambari.server.state.MaintenanceState;
  import org.apache.ambari.server.state.PropertyInfo;
  import org.apache.ambari.server.state.PropertyInfo.PropertyType;
+ import org.apache.ambari.server.state.RefreshCommandConfiguration;
 -import org.apache.ambari.server.state.RepositoryInfo;
  import org.apache.ambari.server.state.Service;
  import org.apache.ambari.server.state.ServiceComponent;
  import org.apache.ambari.server.state.ServiceComponentHost;

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementController.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 6a7f050,1b1f524..4f57881
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@@ -177,6 -148,6 +177,7 @@@ import org.apache.ambari.server.servera
  import org.apache.ambari.server.serveraction.kerberos.KerberosOperationException;
  import org.apache.ambari.server.stack.ExtensionHelper;
  import org.apache.ambari.server.stack.RepoUtil;
++import org.apache.ambari.server.stack.StackManager;
  import org.apache.ambari.server.stageplanner.RoleGraph;
  import org.apache.ambari.server.stageplanner.RoleGraphFactory;
  import org.apache.ambari.server.state.Cluster;
@@@ -5803,259 -5682,4 +5802,259 @@@ public class AmbariManagementController
      return QuickLinkVisibilityControllerFactory.get(quickLinkProfileJson);
    }
  
 +  /**
 +   * Collects metadata info about clusters for agent.
 +   * @return metadata info about clusters
 +   * @throws AmbariException
 +   */
 +  public MetadataUpdateEvent getClustersMetadata() throws AmbariException {
 +    TreeMap<String, MetadataCluster> metadataClusters = new TreeMap<>();
 +
 +    for (Cluster cl : clusters.getClusters().values()) {
 +      StackId stackId = cl.getDesiredStackVersion();
 +
 +      SecurityType securityType = cl.getSecurityType();
 +
 +      TreeMap<String, MetadataServiceInfo> serviceLevelParams = new TreeMap<>();
 +      Collection<ServiceInfo> servicesInfo = ambariMetaInfo.getServices(stackId.getStackName(),
 +          stackId.getStackVersion()).values();
 +      for (ServiceInfo serviceInfo : servicesInfo) {
 +        Long statusCommandTimeout = null;
 +        if (serviceInfo.getCommandScript() != null) {
 +          statusCommandTimeout = new Long (ambariCustomCommandExecutionHelper.getStatusCommandTimeout(serviceInfo));
 +        }
 +
 +        String servicePackageFolder = serviceInfo.getServicePackageFolder();
 +
 +        serviceLevelParams.put(serviceInfo.getName(),
 +            new MetadataServiceInfo(serviceInfo.getVersion(),
 +                serviceInfo.isCredentialStoreEnabled(),
 +                statusCommandTimeout,
 +                servicePackageFolder));
 +      }
 +
 +      MetadataCluster metadataCluster = new MetadataCluster(securityType,
 +          serviceLevelParams,
 +          getMetadataClusterLevelParams(cl, stackId));
 +      metadataClusters.put(Long.toString(cl.getClusterId()), metadataCluster);
 +    }
 +
 +    MetadataUpdateEvent metadataUpdateEvent = new MetadataUpdateEvent(metadataClusters,
 +        getMetadataAmbariLevelParams());
 +    return metadataUpdateEvent;
 +  }
 +
 +  public MetadataUpdateEvent getClusterMetadata(Cluster cl) throws AmbariException {
 +    TreeMap<String, MetadataCluster> metadataClusters = new TreeMap<>();
 +    StackId stackId = cl.getDesiredStackVersion();
 +
 +    SecurityType securityType = cl.getSecurityType();
 +
 +    TreeMap<String, MetadataServiceInfo> serviceLevelParams = new TreeMap<>();
 +    Collection<ServiceInfo> servicesInfo = ambariMetaInfo.getServices(stackId.getStackName(),
 +        stackId.getStackVersion()).values();
 +    for (ServiceInfo serviceInfo : servicesInfo) {
 +      Long statusCommandTimeout = null;
 +      if (serviceInfo.getCommandScript() != null) {
 +        statusCommandTimeout = new Long(ambariCustomCommandExecutionHelper.getStatusCommandTimeout(serviceInfo));
 +      }
 +
 +      String servicePackageFolder = serviceInfo.getServicePackageFolder();
 +
 +      serviceLevelParams.put(serviceInfo.getName(),
 +          new MetadataServiceInfo(serviceInfo.getVersion(),
 +              serviceInfo.isCredentialStoreEnabled(),
 +              statusCommandTimeout,
 +              servicePackageFolder));
 +    }
 +
 +    MetadataCluster metadataCluster = new MetadataCluster(securityType,
 +        serviceLevelParams,
 +        getMetadataClusterLevelParams(cl, stackId));
 +    metadataClusters.put(Long.toString(cl.getClusterId()), metadataCluster);
 +
 +    MetadataUpdateEvent metadataUpdateEvent = new MetadataUpdateEvent(metadataClusters,
 +        null);
 +    return metadataUpdateEvent;
 +  }
 +
 +  public MetadataUpdateEvent getClusterMetadataOnConfigsUpdate(Cluster cl) throws AmbariException {
 +    TreeMap<String, MetadataCluster> metadataClusters = new TreeMap<>();
 +    StackId stackId = cl.getDesiredStackVersion();
 +
 +    MetadataCluster metadataCluster = new MetadataCluster(null,
 +        new TreeMap<>(),
 +        getMetadataClusterLevelConfigsParams(cl, stackId));
 +    metadataClusters.put(Long.toString(cl.getClusterId()), metadataCluster);
 +
 +    MetadataUpdateEvent metadataUpdateEvent = new MetadataUpdateEvent(metadataClusters,
 +        null);
 +    return metadataUpdateEvent;
 +  }
 +
 +  private String getClientsToUpdateConfigs(ComponentInfo componentInfo) {
 +    List<String> clientsToUpdateConfigsList = componentInfo.getClientsToUpdateConfigs();
 +    if (clientsToUpdateConfigsList == null) {
 +      clientsToUpdateConfigsList = new ArrayList<>();
 +      clientsToUpdateConfigsList.add("*");
 +    }
 +    return gson.toJson(clientsToUpdateConfigsList);
 +  }
 +
 +  private Boolean getUnlimitedKeyJCERequirement(ComponentInfo componentInfo, SecurityType clusterSecurityType) {
 +    UnlimitedKeyJCERequirement unlimitedKeyJCERequirement = componentInfo.getUnlimitedKeyJCERequired();
 +    // Ensure that the unlimited key requirement is set. If null, the default value should be used.
 +    if(unlimitedKeyJCERequirement == null) {
 +      unlimitedKeyJCERequirement = UnlimitedKeyJCERequirement.DEFAULT;
 +    }
 +
 +    return (UnlimitedKeyJCERequirement.ALWAYS == unlimitedKeyJCERequirement) ||
 +        ((UnlimitedKeyJCERequirement.KERBEROS_ENABLED == unlimitedKeyJCERequirement) &&
 +            (clusterSecurityType == SecurityType.KERBEROS));
 +
 +  }
 +
 +  //TODO will be a need to change to multi-instance usage
 +  public TreeMap<String, String> getTopologyComponentLevelParams(StackId stackId, String serviceName, String componentName,
 +                                                             SecurityType clusterSecurityType) throws AmbariException {
 +    ComponentInfo componentInfo = ambariMetaInfo.getComponent(
 +        stackId.getStackName(), stackId.getStackVersion(),
 +        serviceName, componentName);
 +
 +    TreeMap<String, String> statusCommandParams = new TreeMap<>();
 +    statusCommandParams.put(ExecutionCommand.KeyNames.CLIENTS_TO_UPDATE_CONFIGS,
 +        getClientsToUpdateConfigs(componentInfo));
 +    statusCommandParams.put(ExecutionCommand.KeyNames.UNLIMITED_KEY_JCE_REQUIRED,
 +         Boolean.toString(getUnlimitedKeyJCERequirement(componentInfo, clusterSecurityType)));
 +    return statusCommandParams;
 +  }
 +
 +  //TODO will be a need to change to multi-instance usage
 +  public TreeMap<String, String> getTopologyCommandParams(StackId stackId, String serviceName, String componentName) throws AmbariException {
 +    ServiceInfo serviceInfo = ambariMetaInfo.getService(stackId.getStackName(),
 +        stackId.getStackVersion(), serviceName);
 +    ComponentInfo componentInfo = ambariMetaInfo.getComponent(
 +        stackId.getStackName(), stackId.getStackVersion(),
 +        serviceName, componentName);
 +
 +    String scriptName = null;
 +    String scriptCommandTimeout = "";
 +    CommandScriptDefinition script = componentInfo.getCommandScript();
 +    if (serviceInfo.getSchemaVersion().equals(AmbariMetaInfo.SCHEMA_VERSION_2)) {
 +      if (script != null) {
 +        scriptName = script.getScript();
 +        if (script.getTimeout() > 0) {
 +          scriptCommandTimeout = String.valueOf(script.getTimeout());
 +        }
 +      } else {
 +        String message = String.format("Component %s of service %s has not " +
 +            "command script defined", componentName, serviceName);
 +        throw new AmbariException(message);
 +      }
 +    }
 +    String agentDefaultCommandTimeout = configs.getDefaultAgentTaskTimeout(false);
 +    String actualTimeout = (!scriptCommandTimeout.equals("") ? scriptCommandTimeout : agentDefaultCommandTimeout);
 +
 +    TreeMap<String, String> commandParams = new TreeMap<>();
 +    commandParams.put(COMMAND_TIMEOUT, actualTimeout);
 +    commandParams.put(SCRIPT, scriptName);
 +    commandParams.put(SCRIPT_TYPE, script.getScriptType().toString());
 +    return commandParams;
 +  }
 +
 +  public TreeMap<String, String> getMetadataClusterLevelParams(Cluster cluster, StackId stackId) throws AmbariException {
 +    TreeMap<String, String> clusterLevelParams = new TreeMap<>();
 +    clusterLevelParams.put(STACK_NAME, stackId.getStackName());
 +    clusterLevelParams.put(STACK_VERSION, stackId.getStackVersion());
 +
 +    Map<String, DesiredConfig> desiredConfigs = cluster.getDesiredConfigs();
 +    if (MapUtils.isNotEmpty(desiredConfigs)) {
 +
 +      Set<String> userSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.USER, cluster, desiredConfigs);
 +      String userList = gson.toJson(userSet);
 +      clusterLevelParams.put(USER_LIST, userList);
 +
 +      //Create a user_group mapping and send it as part of the hostLevelParams
 +      Map<String, Set<String>> userGroupsMap = configHelper.createUserGroupsMap(
 +          stackId, cluster, desiredConfigs);
 +      String userGroups = gson.toJson(userGroupsMap);
 +      clusterLevelParams.put(USER_GROUPS, userGroups);
 +
 +      Set<String> groupSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.GROUP, cluster, desiredConfigs);
 +      String groupList = gson.toJson(groupSet);
 +      clusterLevelParams.put(GROUP_LIST, groupList);
 +    }
 +    Set<String> notManagedHdfsPathSet = configHelper.getPropertyValuesWithPropertyType(stackId,
 +        PropertyType.NOT_MANAGED_HDFS_PATH, cluster, desiredConfigs);
 +    String notManagedHdfsPathList = gson.toJson(notManagedHdfsPathSet);
 +    clusterLevelParams.put(NOT_MANAGED_HDFS_PATH_LIST, notManagedHdfsPathList);
 +
 +    clusterLevelParams.put(CLUSTER_NAME, cluster.getClusterName());
 +
 +    StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(), stackId.getStackVersion());
-     clusterLevelParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());
++    clusterLevelParams.put(HOOKS_FOLDER, StackManager.DEFAULT_HOOKS_FOLDER);
 +
 +    return clusterLevelParams;
 +  }
 +
 +  public TreeMap<String, String> getMetadataClusterLevelConfigsParams(Cluster cluster, StackId stackId) throws AmbariException {
 +    TreeMap<String, String> clusterLevelParams = new TreeMap<>();
 +
 +    Map<String, DesiredConfig> desiredConfigs = cluster.getDesiredConfigs();
 +    if (MapUtils.isNotEmpty(desiredConfigs)) {
 +
 +      Set<String> userSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.USER, cluster, desiredConfigs);
 +      String userList = gson.toJson(userSet);
 +      clusterLevelParams.put(USER_LIST, userList);
 +
 +      //Create a user_group mapping and send it as part of the hostLevelParams
 +      Map<String, Set<String>> userGroupsMap = configHelper.createUserGroupsMap(
 +          stackId, cluster, desiredConfigs);
 +      String userGroups = gson.toJson(userGroupsMap);
 +      clusterLevelParams.put(USER_GROUPS, userGroups);
 +
 +      Set<String> groupSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.GROUP, cluster, desiredConfigs);
 +      String groupList = gson.toJson(groupSet);
 +      clusterLevelParams.put(GROUP_LIST, groupList);
 +    }
 +    Set<String> notManagedHdfsPathSet = configHelper.getPropertyValuesWithPropertyType(stackId,
 +        PropertyType.NOT_MANAGED_HDFS_PATH, cluster, desiredConfigs);
 +    String notManagedHdfsPathList = gson.toJson(notManagedHdfsPathSet);
 +    clusterLevelParams.put(NOT_MANAGED_HDFS_PATH_LIST, notManagedHdfsPathList);
 +
 +    return clusterLevelParams;
 +  }
 +
 +  public TreeMap<String, String> getMetadataAmbariLevelParams() throws AmbariException {
 +    TreeMap<String, String> clusterLevelParams = new TreeMap<>();
 +    clusterLevelParams.put(JDK_LOCATION, getJdkResourceUrl());
 +    clusterLevelParams.put(JAVA_HOME, getJavaHome());
 +    clusterLevelParams.put(JAVA_VERSION, String.valueOf(configs.getJavaVersion()));
 +    clusterLevelParams.put(JDK_NAME, getJDKName());
 +    clusterLevelParams.put(JCE_NAME, getJCEName());
 +    clusterLevelParams.put(DB_NAME, getServerDB());
 +    clusterLevelParams.put(MYSQL_JDBC_URL, getMysqljdbcUrl());
 +    clusterLevelParams.put(ORACLE_JDBC_URL, getOjdbcUrl());
 +    clusterLevelParams.put(DB_DRIVER_FILENAME, configs.getMySQLJarName());
 +    clusterLevelParams.putAll(getRcaParameters());
 +    clusterLevelParams.put(HOST_SYS_PREPPED, configs.areHostsSysPrepped());
 +    clusterLevelParams.put(AGENT_STACK_RETRY_ON_UNAVAILABILITY, configs.isAgentStackRetryOnInstallEnabled());
 +    clusterLevelParams.put(AGENT_STACK_RETRY_COUNT, configs.getAgentStackRetryOnInstallCount());
 +
 +    return clusterLevelParams;
 +  }
 +
 +  @Override
 +  public HostRepositories retrieveHostRepositories(Cluster cluster, Host host) throws AmbariException {
 +    List<ServiceComponentHost> hostComponents = cluster.getServiceComponentHosts(host.getHostName());
 +    Map<Long, CommandRepository> hostRepositories = new HashMap<>();
 +    Map<String, Long> componentsRepos = new HashMap<>();
 +    for (ServiceComponentHost serviceComponentHost : hostComponents) {
 +      CommandRepository commandRepository = ambariMetaInfo.getCommandRepository(cluster,
 +          serviceComponentHost.getServiceComponent(), host);
 +      hostRepositories.put(commandRepository.getM_repoVersionId(), commandRepository);
 +      componentsRepos.put(serviceComponentHost.getServiceComponentName(), commandRepository.getM_repoVersionId());
 +    }
 +    return new HostRepositories(hostRepositories, componentsRepos);
 +  }
  }

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java
index 2fd5abe,1c0853b9..2637f8e
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java
@@@ -128,10 -128,11 +128,10 @@@ public class CreatePrincipalsServerActi
        seenPrincipals.add(evaluatedPrincipal);
  
        boolean processPrincipal;
-       boolean regenerateKeytabs = "true".equalsIgnoreCase(getCommandParameterValue(getCommandParameters(), REGENERATE_ALL));
+       boolean regenerateKeytabs = getOperationType(getCommandParameters()) == OperationType.RECREATE_ALL;
  
        if (regenerateKeytabs) {
 -        // do not process cached identities that can be passed as is(headless identities)
 -        processPrincipal = "false".equals(identityRecord.get(KerberosIdentityDataFileReader.ONLY_KEYTAB_WRITE).toLowerCase());
 +        processPrincipal = true;
        } else {
          KerberosPrincipalEntity kerberosPrincipalEntity = kerberosPrincipalDAO.find(evaluatedPrincipal);
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerUsersyncConfigCalculation.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
----------------------------------------------------------------------


[07/50] [abbrv] ambari git commit: AMBARI-22218 Log Search UI generated by maven build uses mock data instead of real API. (ababiichuk)

Posted by ao...@apache.org.
AMBARI-22218 Log Search UI generated by maven build uses mock data instead of real API. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/527e45d5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/527e45d5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/527e45d5

Branch: refs/heads/branch-3.0-perf
Commit: 527e45d50f2a6faebd7cf842297aa37ef53f8b16
Parents: 68df85d
Author: ababiichuk <ab...@hortonworks.com>
Authored: Thu Oct 12 15:54:31 2017 +0300
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Thu Oct 12 17:15:32 2017 +0300

----------------------------------------------------------------------
 ambari-logsearch/ambari-logsearch-web/package.json | 2 +-
 ambari-logsearch/ambari-logsearch-web/pom.xml      | 3 +++
 2 files changed, 4 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/527e45d5/ambari-logsearch/ambari-logsearch-web/package.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/package.json b/ambari-logsearch/ambari-logsearch-web/package.json
index f8525c4..2c6aa8d 100644
--- a/ambari-logsearch/ambari-logsearch-web/package.json
+++ b/ambari-logsearch/ambari-logsearch-web/package.json
@@ -6,7 +6,7 @@
     "ng": "ng",
     "start": "webpack-dev-server --port=4200",
     "build": "webpack",
-    "build-prod": "NODE_ENV='production' webpack -p",
+    "build-prod": "NODE_ENV=production webpack -p",
     "test": "karma start ./karma.conf.js",
     "lint": "ng lint",
     "e2e": "protractor ./protractor.conf.js",

http://git-wip-us.apache.org/repos/asf/ambari/blob/527e45d5/ambari-logsearch/ambari-logsearch-web/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/pom.xml b/ambari-logsearch/ambari-logsearch-web/pom.xml
index a0621d9..953f0df 100644
--- a/ambari-logsearch/ambari-logsearch-web/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-web/pom.xml
@@ -76,6 +76,9 @@
             <!-- optional: the default phase is "generate-resources" -->
             <phase>generate-resources</phase>
             <configuration>
+              <environmentVariables>
+                <NODE_ENV>production</NODE_ENV>
+              </environmentVariables>
               <!-- optional: if not specified, it will run webpack's default
               build (and you can remove this whole <configuration> section.) -->
               <arguments>-p</arguments>


[13/50] [abbrv] ambari git commit: AMBARI-22226. RU: Downgrade is failed ZKFC (ncole)

Posted by ao...@apache.org.
AMBARI-22226. RU: Downgrade is failed ZKFC (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/20faae7a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/20faae7a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/20faae7a

Branch: refs/heads/branch-3.0-perf
Commit: 20faae7a3647943b61e46f41ce27da81e91dc519
Parents: 3a0d168
Author: Nate Cole <nc...@hortonworks.com>
Authored: Fri Oct 13 08:42:12 2017 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Fri Oct 13 09:19:08 2017 -0400

----------------------------------------------------------------------
 .../upgrades/FinalizeUpgradeAction.java         | 21 ++++++++++----------
 1 file changed, 10 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/20faae7a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
index d6876d9..87ea1fc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
@@ -271,7 +271,7 @@ public class FinalizeUpgradeAction extends AbstractUpgradeServerAction {
             errors.size())).append(System.lineSeparator());
 
         for (InfoTuple error : errors) {
-          messageBuff.append(String.format("%s: $s (current = %s, desired = %s ", error.hostName,
+          messageBuff.append(String.format("%s: %s (current = %s, desired = %s)", error.hostName,
               error.componentName, error.currentVersion, error.targetVersion));
 
           messageBuff.append(System.lineSeparator());
@@ -367,23 +367,22 @@ public class FinalizeUpgradeAction extends AbstractUpgradeServerAction {
     Set<InfoTuple> errors = new TreeSet<>();
 
     Cluster cluster = upgradeContext.getCluster();
-    RepositoryVersionEntity repositoryVersionEntity = upgradeContext.getRepositoryVersion();
-    StackId targetStackId = repositoryVersionEntity.getStackId();
-
     Set<String> servicesParticipating = upgradeContext.getSupportedServices();
     for (String serviceName : servicesParticipating) {
       Service service = cluster.getService(serviceName);
-      String targetVersion = upgradeContext.getTargetVersion(serviceName);
+      RepositoryVersionEntity repositoryVersionEntity = upgradeContext.getTargetRepositoryVersion(serviceName);
+      StackId targetStackId = repositoryVersionEntity.getStackId();
+      String targetVersion = repositoryVersionEntity.getVersion();
 
       for (ServiceComponent serviceComponent : service.getServiceComponents().values()) {
-        for (ServiceComponentHost serviceComponentHost : serviceComponent.getServiceComponentHosts().values()) {
-          ComponentInfo componentInfo = ambariMetaInfo.getComponent(targetStackId.getStackName(),
-                  targetStackId.getStackVersion(), service.getName(), serviceComponent.getName());
+        ComponentInfo componentInfo = ambariMetaInfo.getComponent(targetStackId.getStackName(),
+            targetStackId.getStackVersion(), service.getName(), serviceComponent.getName());
 
-          if (!componentInfo.isVersionAdvertised()) {
-            continue;
-          }
+        if (!componentInfo.isVersionAdvertised()) {
+          continue;
+        }
 
+        for (ServiceComponentHost serviceComponentHost : serviceComponent.getServiceComponentHosts().values()) {
           if (!StringUtils.equals(targetVersion, serviceComponentHost.getVersion())) {
             errors.add(new InfoTuple(service.getName(), serviceComponent.getName(),
                 serviceComponentHost.getHostName(), serviceComponentHost.getVersion(),


[28/50] [abbrv] ambari git commit: AMBARI-22245 - YARN Service Checks Fail Because of Old hadoop-client Classpath Entry (jonathanhurley)

Posted by ao...@apache.org.
AMBARI-22245 - YARN Service Checks Fail Because of Old hadoop-client Classpath Entry (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4a223503
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4a223503
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4a223503

Branch: refs/heads/branch-3.0-perf
Commit: 4a2235037e2306ac8197f8d1bf917f5402f248bb
Parents: 523f505
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon Oct 16 15:33:17 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon Oct 16 16:13:00 2017 -0400

----------------------------------------------------------------------
 .../2.1.0.2.0/package/scripts/params_linux.py   | 34 +++++++++++--
 .../2.1.0.2.0/package/scripts/status_params.py  | 52 ++++++++-----------
 .../YARN/3.0.0.3.0/configuration/yarn-site.xml  |  2 +-
 .../3.0.0.3.0/package/scripts/params_linux.py   | 53 +++++++++++++++-----
 .../3.0.0.3.0/package/scripts/status_params.py  | 52 ++++++++-----------
 .../services/YARN/configuration/yarn-site.xml   |  2 +-
 .../stacks/HDP/2.6/upgrades/config-upgrade.xml  |  7 +++
 .../HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml |  7 ++-
 .../stacks/HDP/2.6/upgrades/upgrade-2.6.xml     |  1 +
 9 files changed, 124 insertions(+), 86 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4a223503/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 2ab779f..620408b 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -20,8 +20,10 @@ Ambari Agent
 """
 import os
 
+from resource_management.core import sudo
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.functions import component_version
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
@@ -88,6 +90,7 @@ stack_supports_ranger_audit_db = check_stack_feature(StackFeature.RANGER_AUDIT_D
 hostname = config['hostname']
 
 # hadoop default parameters
+hadoop_home = status_params.hadoop_home
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
 hadoop_bin = stack_select.get_hadoop_dir("sbin")
 hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
@@ -113,12 +116,33 @@ if stack_supports_ru:
   if command_role in YARN_SERVER_ROLE_DIRECTORY_MAP:
     yarn_role_root = YARN_SERVER_ROLE_DIRECTORY_MAP[command_role]
 
-  hadoop_mapred2_jar_location = format("{stack_root}/current/{mapred_role_root}")
-  mapred_bin = format("{stack_root}/current/{mapred_role_root}/sbin")
-
+  # defaults set to current based on role
+  hadoop_mapr_home = format("{stack_root}/current/{mapred_role_root}")
   hadoop_yarn_home = format("{stack_root}/current/{yarn_role_root}")
-  yarn_bin = format("{stack_root}/current/{yarn_role_root}/sbin")
-  yarn_container_bin = format("{stack_root}/current/{yarn_role_root}/bin")
+
+  # try to render the specific version
+  version = component_version.get_component_repository_version()
+  if version is None:
+    version = default("/commandParams/version", None)
+
+
+  if version is not None:
+    hadoop_mapr_versioned_home = format("{stack_root}/{version}/hadoop-mapreduce")
+    hadoop_yarn_versioned_home = format("{stack_root}/{version}/hadoop-yarn")
+
+    if sudo.path_isdir(hadoop_mapr_versioned_home):
+      hadoop_mapr_home = hadoop_mapr_versioned_home
+
+    if sudo.path_isdir(hadoop_yarn_versioned_home):
+      hadoop_yarn_home = hadoop_yarn_versioned_home
+
+
+  hadoop_mapred2_jar_location = hadoop_mapr_home
+  mapred_bin = format("{hadoop_mapr_home}/sbin")
+
+  yarn_bin = format("{hadoop_yarn_home}/sbin")
+  yarn_container_bin = format("{hadoop_yarn_home}/bin")
+
 
 if stack_supports_timeline_state_store:
   # Timeline Service property that was added timeline_state_store stack feature

http://git-wip-us.apache.org/repos/asf/ambari/blob/4a223503/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
index c2e9d92..6bb528f 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
@@ -19,43 +19,31 @@ limitations under the License.
 """
 from resource_management.libraries.script.script import Script
 from resource_management.libraries import functions
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.default import default
-from ambari_commons import OSCheck
 
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-if OSCheck.is_windows_family():
-  resourcemanager_win_service_name = 'resourcemanager'
-  nodemanager_win_service_name = 'nodemanager'
-  historyserver_win_service_name = 'historyserver'
-  timelineserver_win_service_name = 'timelineserver'
-
-  service_map = {
-    'resourcemanager' : resourcemanager_win_service_name,
-    'nodemanager' : nodemanager_win_service_name,
-    'historyserver' : historyserver_win_service_name,
-    'timelineserver' : timelineserver_win_service_name
-  }
-else:
-  mapred_user = config['configurations']['mapred-env']['mapred_user']
-  yarn_user = config['configurations']['yarn-env']['yarn_user']
-  yarn_pid_dir_prefix = config['configurations']['yarn-env']['yarn_pid_dir_prefix']
-  mapred_pid_dir_prefix = config['configurations']['mapred-env']['mapred_pid_dir_prefix']
-  yarn_pid_dir = format("{yarn_pid_dir_prefix}/{yarn_user}")
-  mapred_pid_dir = format("{mapred_pid_dir_prefix}/{mapred_user}")
-
-  resourcemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-resourcemanager.pid")
-  nodemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-nodemanager.pid")
-  yarn_historyserver_pid_file_old = format("{yarn_pid_dir}/yarn-{yarn_user}-historyserver.pid")
-  yarn_historyserver_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-timelineserver.pid")  # *-historyserver.pid is deprecated
-  mapred_historyserver_pid_file = format("{mapred_pid_dir}/mapred-{mapred_user}-historyserver.pid")
-
-  hadoop_conf_dir = functions.conf_select.get_hadoop_conf_dir()
-
-  hostname = config['hostname']
-  kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-  security_enabled = config['configurations']['cluster-env']['security_enabled']
+mapred_user = config['configurations']['mapred-env']['mapred_user']
+yarn_user = config['configurations']['yarn-env']['yarn_user']
+yarn_pid_dir_prefix = config['configurations']['yarn-env']['yarn_pid_dir_prefix']
+mapred_pid_dir_prefix = config['configurations']['mapred-env']['mapred_pid_dir_prefix']
+yarn_pid_dir = format("{yarn_pid_dir_prefix}/{yarn_user}")
+mapred_pid_dir = format("{mapred_pid_dir_prefix}/{mapred_user}")
+
+resourcemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-resourcemanager.pid")
+nodemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-nodemanager.pid")
+yarn_historyserver_pid_file_old = format("{yarn_pid_dir}/yarn-{yarn_user}-historyserver.pid")
+yarn_historyserver_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-timelineserver.pid")  # *-historyserver.pid is deprecated
+mapred_historyserver_pid_file = format("{mapred_pid_dir}/mapred-{mapred_user}-historyserver.pid")
+
+hadoop_home = stack_select.get_hadoop_dir("home")
+hadoop_conf_dir = functions.conf_select.get_hadoop_conf_dir()
+
+hostname = config['hostname']
+kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+security_enabled = config['configurations']['cluster-env']['security_enabled']
 
 stack_name = default("/hostLevelParams/stack_name", None)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/4a223503/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-site.xml b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-site.xml
index 2a69d35..6f571e4 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-site.xml
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-site.xml
@@ -151,7 +151,7 @@
   </property>
   <property>
     <name>yarn.application.classpath</name>
-    <value>$HADOOP_CONF_DIR,{{stack_root}}/current/hadoop-client/*,{{stack_root}}/current/hadoop-client/lib/*,{{stack_root}}/current/hadoop-hdfs-client/*,{{stack_root}}/current/hadoop-hdfs-client/lib/*,{{stack_root}}/current/hadoop-yarn-client/*,{{stack_root}}/current/hadoop-yarn-client/lib/*</value>
+    <value>$HADOOP_CONF_DIR,{{hadoop_home}}/*,{{hadoop_home}}/lib/*,{{stack_root}}/current/hadoop-hdfs-client/*,{{stack_root}}/current/hadoop-hdfs-client/lib/*,{{stack_root}}/current/hadoop-yarn-client/*,{{stack_root}}/current/hadoop-yarn-client/lib/*</value>
     <description>Classpath for typical applications.</description>
     <on-ambari-upgrade add="false"/>
   </property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/4a223503/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
index e66ec3c..e4dbe2c 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
@@ -20,8 +20,10 @@ Ambari Agent
 """
 import os
 
+from resource_management.core import sudo
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.functions import component_version
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
@@ -30,12 +32,13 @@ from resource_management.libraries.functions.stack_features import check_stack_f
 from resource_management.libraries.functions.stack_features import get_stack_feature_version
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
-from resource_management.libraries.functions.version import format_stack_version
+from resource_management.libraries.functions.version import format_stack_version, get_major_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries import functions
 from resource_management.libraries.functions import is_empty
 from resource_management.libraries.functions.get_architecture import get_architecture
 from resource_management.libraries.functions.setup_ranger_plugin_xml import get_audit_configs, generate_ranger_service_config
+
 import status_params
 
 # a map of the Ambari role to the component name
@@ -65,30 +68,33 @@ tarball_map = default("/configurations/cluster-env/tarball_map", None)
 config_path = os.path.join(stack_root, "current/hadoop-client/conf")
 config_dir = os.path.realpath(config_path)
 
+# get the correct version to use for checking stack features
+version_for_stack_feature_checks = get_stack_feature_version(config)
+
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted_major = format_stack_version(stack_version_unformatted)
 stack_version_formatted = functions.get_stack_version('hadoop-yarn-resourcemanager')
+major_stack_version = get_major_version(stack_version_formatted_major)
 
-stack_supports_ru = stack_version_formatted_major and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted_major)
-stack_supports_timeline_state_store = stack_version_formatted_major and check_stack_feature(StackFeature.TIMELINE_STATE_STORE, stack_version_formatted_major)
+stack_supports_ru = check_stack_feature(StackFeature.ROLLING_UPGRADE, version_for_stack_feature_checks)
+stack_supports_timeline_state_store = check_stack_feature(StackFeature.TIMELINE_STATE_STORE, version_for_stack_feature_checks)
 
 # New Cluster Stack Version that is defined during the RESTART of a Stack Upgrade.
 # It cannot be used during the initial Cluser Install because the version is not yet known.
 version = default("/commandParams/version", None)
 
-# get the correct version to use for checking stack features
-version_for_stack_feature_checks = get_stack_feature_version(config)
-
 stack_supports_ranger_kerberos = check_stack_feature(StackFeature.RANGER_KERBEROS_SUPPORT, version_for_stack_feature_checks)
 stack_supports_ranger_audit_db = check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, version_for_stack_feature_checks)
 
 hostname = config['hostname']
 
 # hadoop default parameters
+hadoop_home = status_params.hadoop_home
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
 hadoop_bin = stack_select.get_hadoop_dir("sbin")
 hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
+hadoop_lib_home = stack_select.get_hadoop_dir("lib")
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hadoop_yarn_home = '/usr/lib/hadoop-yarn'
 hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
@@ -110,12 +116,33 @@ if stack_supports_ru:
   if command_role in YARN_SERVER_ROLE_DIRECTORY_MAP:
     yarn_role_root = YARN_SERVER_ROLE_DIRECTORY_MAP[command_role]
 
-  hadoop_mapred2_jar_location = format("{stack_root}/current/{mapred_role_root}")
-  mapred_bin = format("{stack_root}/current/{mapred_role_root}/sbin")
-
+  # defaults set to current based on role
+  hadoop_mapr_home = format("{stack_root}/current/{mapred_role_root}")
   hadoop_yarn_home = format("{stack_root}/current/{yarn_role_root}")
-  yarn_bin = format("{stack_root}/current/{yarn_role_root}/sbin")
-  yarn_container_bin = format("{stack_root}/current/{yarn_role_root}/bin")
+
+  # try to render the specific version
+  version = component_version.get_component_repository_version()
+  if version is None:
+    version = default("/commandParams/version", None)
+
+
+  if version is not None:
+    hadoop_mapr_versioned_home = format("{stack_root}/{version}/hadoop-mapreduce")
+    hadoop_yarn_versioned_home = format("{stack_root}/{version}/hadoop-yarn")
+
+    if sudo.path_isdir(hadoop_mapr_versioned_home):
+      hadoop_mapr_home = hadoop_mapr_versioned_home
+
+    if sudo.path_isdir(hadoop_yarn_versioned_home):
+      hadoop_yarn_home = hadoop_yarn_versioned_home
+
+
+  hadoop_mapred2_jar_location = hadoop_mapr_home
+  mapred_bin = format("{hadoop_mapr_home}/sbin")
+
+  yarn_bin = format("{hadoop_yarn_home}/sbin")
+  yarn_container_bin = format("{hadoop_yarn_home}/bin")
+
 
 if stack_supports_timeline_state_store:
   # Timeline Service property that was added timeline_state_store stack feature
@@ -347,7 +374,7 @@ HdfsResource = functools.partial(
   immutable_paths = get_not_managed_resources(),
   dfs_type = dfs_type
  )
-update_files_only = default("/commandParams/update_files_only", False)
+update_files_only = default("/commandParams/update_files_only",False)
 
 mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
 
@@ -365,7 +392,7 @@ cgroups_dir = "/cgroups_test/cpu"
 
 # hostname of the active HDFS HA Namenode (only used when HA is enabled)
 dfs_ha_namenode_active = default("/configurations/hadoop-env/dfs_ha_initial_namenode_active", None)
-if dfs_ha_namenode_active is not None: 
+if dfs_ha_namenode_active is not None:
   namenode_hostname = dfs_ha_namenode_active
 else:
   namenode_hostname = config['clusterHostInfo']['namenode_host'][0]

http://git-wip-us.apache.org/repos/asf/ambari/blob/4a223503/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/status_params.py
index 7c1b2c7..6bb528f 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/status_params.py
@@ -19,43 +19,31 @@ limitations under the License.
 """
 from resource_management.libraries.script.script import Script
 from resource_management.libraries import functions
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.default import default
-from ambari_commons import OSCheck
 
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-if OSCheck.is_windows_family():
-  resourcemanager_win_service_name = 'resourcemanager'
-  nodemanager_win_service_name = 'nodemanager'
-  historyserver_win_service_name = 'historyserver'
-  timelineserver_win_service_name = 'timelineserver'
-
-  service_map = {
-    'resourcemanager' : resourcemanager_win_service_name,
-    'nodemanager' : nodemanager_win_service_name,
-    'historyserver' : historyserver_win_service_name,
-    'timelineserver' : timelineserver_win_service_name
-  }
-else:
-  mapred_user = config['configurations']['mapred-env']['mapred_user']
-  yarn_user = config['configurations']['yarn-env']['yarn_user']
-  yarn_pid_dir_prefix = config['configurations']['yarn-env']['yarn_pid_dir_prefix']
-  mapred_pid_dir_prefix = config['configurations']['mapred-env']['mapred_pid_dir_prefix']
-  yarn_pid_dir = format("{yarn_pid_dir_prefix}/{yarn_user}")
-  mapred_pid_dir = format("{mapred_pid_dir_prefix}/{mapred_user}")
-
-  resourcemanager_pid_file = format("{yarn_pid_dir}/hadoop-{yarn_user}-resourcemanager.pid")
-  nodemanager_pid_file = format("{yarn_pid_dir}/hadoop-{yarn_user}-nodemanager.pid")
-  yarn_historyserver_pid_file_old = format("{yarn_pid_dir}/hadoop-{yarn_user}-historyserver.pid")
-  yarn_historyserver_pid_file = format("{yarn_pid_dir}/hadoop-{yarn_user}-timelineserver.pid")  # *-historyserver.pid is deprecated
-  mapred_historyserver_pid_file = format("{mapred_pid_dir}/hadoop-{mapred_user}-historyserver.pid")
-
-  hadoop_conf_dir = functions.conf_select.get_hadoop_conf_dir()
-
-  hostname = config['hostname']
-  kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-  security_enabled = config['configurations']['cluster-env']['security_enabled']
+mapred_user = config['configurations']['mapred-env']['mapred_user']
+yarn_user = config['configurations']['yarn-env']['yarn_user']
+yarn_pid_dir_prefix = config['configurations']['yarn-env']['yarn_pid_dir_prefix']
+mapred_pid_dir_prefix = config['configurations']['mapred-env']['mapred_pid_dir_prefix']
+yarn_pid_dir = format("{yarn_pid_dir_prefix}/{yarn_user}")
+mapred_pid_dir = format("{mapred_pid_dir_prefix}/{mapred_user}")
+
+resourcemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-resourcemanager.pid")
+nodemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-nodemanager.pid")
+yarn_historyserver_pid_file_old = format("{yarn_pid_dir}/yarn-{yarn_user}-historyserver.pid")
+yarn_historyserver_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-timelineserver.pid")  # *-historyserver.pid is deprecated
+mapred_historyserver_pid_file = format("{mapred_pid_dir}/mapred-{mapred_user}-historyserver.pid")
+
+hadoop_home = stack_select.get_hadoop_dir("home")
+hadoop_conf_dir = functions.conf_select.get_hadoop_conf_dir()
+
+hostname = config['hostname']
+kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+security_enabled = config['configurations']['cluster-env']['security_enabled']
 
 stack_name = default("/hostLevelParams/stack_name", None)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/4a223503/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml
index b5eedea..71c63c1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml
@@ -19,7 +19,7 @@
 <configuration supports_final="true">
   <property>
     <name>yarn.application.classpath</name>
-    <value>/etc/hadoop/conf,/usr/hdp/current/hadoop-client/*,/usr/hdp/current/hadoop-client/lib/*,/usr/hdp/current/hadoop-hdfs-client/*,/usr/hdp/current/hadoop-hdfs-client/lib/*,/usr/hdp/current/hadoop-yarn-client/*,/usr/hdp/current/hadoop-yarn-client/lib/*,/usr/hdp/current/ext/hadoop/*</value>
+    <value>{{hadoop_home}}/conf,{{hadoop_home}}/*,{{hadoop_home}}/lib/*,/usr/hdp/current/hadoop-hdfs-client/*,/usr/hdp/current/hadoop-hdfs-client/lib/*,/usr/hdp/current/hadoop-yarn-client/*,/usr/hdp/current/hadoop-yarn-client/lib/*,/usr/hdp/current/ext/hadoop/*</value>
     <description>Classpath for typical applications.</description>
     <on-ambari-upgrade add="true"/>
   </property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/4a223503/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
index fd7e438..91044d8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
@@ -155,6 +155,13 @@
               if-key="yarn.resourcemanager.monitor.capacity.preemption.natural_termination_factor"
               if-key-state="absent" />
           </definition>
+
+
+          <definition xsi:type="configure" id="hdp_2_6_yarn_app_classpath_parameterization" summary="Application Classpath Parameterization">
+            <type>yarn-site</type>
+            <replace key="yarn.application.classpath" find="/usr/hdp/current/hadoop-client" replace-with="{{hadoop_home}}"/>
+          </definition>
+
         </changes>
       </component>
       <component name="NODEMANAGER">

http://git-wip-us.apache.org/repos/asf/ambari/blob/4a223503/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
index 832c505..0b5e8c1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
@@ -279,8 +279,7 @@
     <!-- After processing this group, will change the effective Stack of the UpgradeContext object. -->
     <group xsi:type="update-stack" name="UPDATE_DESIRED_REPOSITORY_ID" title="Update Target Repositories">
       <execute-stage title="Update Target Repositories">
-        <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.UpdateDesiredRepositoryAction">
-        </task>
+        <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.UpdateDesiredRepositoryAction"/>
       </execute-stage>
     </group>
 
@@ -343,6 +342,10 @@
         <task xsi:type="configure" id="hdp_2_6_yarn_preemption"/>
       </execute-stage>
 
+      <execute-stage service="YARN" component="RESOURCEMANAGER" title="Application Classpath Parameterization">
+        <task xsi:type="configure" id="hdp_2_6_yarn_app_classpath_parameterization" supports-patch="true"/>
+      </execute-stage>
+
       <!-- YARN -->
       <execute-stage service="YARN" component="NODEMANAGER" title="Apply config changes for YARN NM">
         <task xsi:type="configure" id="hdp_2_6_0_0_yarn_nodemanager_llap_mem">

http://git-wip-us.apache.org/repos/asf/ambari/blob/4a223503/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
index d0e11a1..00597c6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
@@ -696,6 +696,7 @@
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_6_maint_ranger_yarn_plugin_cluster_name"/>
           <task xsi:type="configure" id="hdp_2_6_yarn_preemption" />
+          <task xsi:type="configure" id="hdp_2_6_yarn_app_classpath_parameterization" supports-patch="true"/>
         </pre-upgrade>
         <pre-downgrade/> <!--  no-op to prevent config changes on downgrade -->
         <upgrade>


[47/50] [abbrv] ambari git commit: AMBARI-22267 - Version registration failure during patch upgrade + Debian + Oracle run (jonathanhurley)

Posted by ao...@apache.org.
AMBARI-22267 - Version registration failure during patch upgrade + Debian + Oracle run (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b4eddc97
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b4eddc97
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b4eddc97

Branch: refs/heads/branch-3.0-perf
Commit: b4eddc9770eda0e47fe025254ba6143a581a7855
Parents: 2ae81d9
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Oct 18 19:27:34 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Oct 18 19:41:23 2017 -0400

----------------------------------------------------------------------
 .../ambari/server/orm/entities/RepositoryVersionEntity.java    | 5 +----
 .../ambari/server/orm/entities/ServiceDesiredStateEntity.java  | 6 +++---
 2 files changed, 4 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b4eddc97/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
index b4f59dc..7eedc4d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
@@ -21,13 +21,11 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Set;
 
-import javax.persistence.Basic;
 import javax.persistence.CascadeType;
 import javax.persistence.Column;
 import javax.persistence.Entity;
 import javax.persistence.EnumType;
 import javax.persistence.Enumerated;
-import javax.persistence.FetchType;
 import javax.persistence.GeneratedValue;
 import javax.persistence.GenerationType;
 import javax.persistence.Id;
@@ -92,7 +90,7 @@ import com.google.inject.Provider;
         query = "SELECT repositoryVersion FROM RepositoryVersionEntity repositoryVersion WHERE repositoryVersion.version = :version ORDER BY repositoryVersion.id DESC"),
     @NamedQuery(
         name = "findByServiceDesiredVersion",
-        query = "SELECT DISTINCT sd.desiredRepositoryVersion from ServiceDesiredStateEntity sd WHERE sd.desiredRepositoryVersion IN ?1") })
+        query = "SELECT repositoryVersion FROM RepositoryVersionEntity repositoryVersion WHERE repositoryVersion IN (SELECT DISTINCT sd1.desiredRepositoryVersion FROM ServiceDesiredStateEntity sd1 WHERE sd1.desiredRepositoryVersion IN ?1)") })
 @StaticallyInject
 public class RepositoryVersionEntity {
   private static final Logger LOG = LoggerFactory.getLogger(RepositoryVersionEntity.class);
@@ -129,7 +127,6 @@ public class RepositoryVersionEntity {
   @Enumerated(value = EnumType.STRING)
   private RepositoryType type = RepositoryType.STANDARD;
 
-  @Basic(fetch=FetchType.LAZY)
   @Lob
   @Column(name="version_xml", insertable = true, updatable = true)
   private String versionXml;

http://git-wip-us.apache.org/repos/asf/ambari/blob/b4eddc97/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceDesiredStateEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceDesiredStateEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceDesiredStateEntity.java
index fc26478..dbb999e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceDesiredStateEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceDesiredStateEntity.java
@@ -39,20 +39,20 @@ import org.apache.commons.lang.builder.EqualsBuilder;
 @Entity
 public class ServiceDesiredStateEntity {
 
-  @Column(name = "cluster_id", nullable = false, insertable = false, updatable = false, length = 10)
   @Id
+  @Column(name = "cluster_id", nullable = false, insertable = false, updatable = false, length = 10)  
   private Long clusterId;
 
-  @Column(name = "service_name", nullable = false, insertable = false, updatable = false)
   @Id
+  @Column(name = "service_name", nullable = false, insertable = false, updatable = false)
   private String serviceName;
 
   @Column(name = "desired_state", nullable = false, insertable = true, updatable = true)
   @Enumerated(value = EnumType.STRING)
   private State desiredState = State.INIT;
 
-  @Column(name = "desired_host_role_mapping", nullable = false, insertable = true, updatable = true, length = 10)
   @Basic
+  @Column(name = "desired_host_role_mapping", nullable = false, insertable = true, updatable = true, length = 10)  
   private int desiredHostRoleMapping = 0;
 
   @Column(name = "maintenance_state", nullable = false, insertable = true, updatable = true)


[45/50] [abbrv] ambari git commit: AMBARI-22228 - Remove Remote Tea Runtime library dependency from Storm binaries (Arun Mahadevan via jonathanhurley)

Posted by ao...@apache.org.
AMBARI-22228 - Remove Remote Tea Runtime library dependency from Storm binaries (Arun Mahadevan via jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8852f33b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8852f33b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8852f33b

Branch: refs/heads/branch-3.0-perf
Commit: 8852f33b90d4983dd9927184f0c20153eaf3b065
Parents: 3b5cbed
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Oct 18 16:54:16 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Oct 18 17:27:11 2017 -0400

----------------------------------------------------------------------
 .../resources/stacks/HDP/2.3/upgrades/config-upgrade.xml  |  5 +++++
 .../stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml    |  3 +++
 .../resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml     |  1 +
 .../resources/stacks/HDP/2.4/upgrades/config-upgrade.xml  |  4 ++++
 .../stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml    |  3 +++
 .../resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml     |  1 +
 .../resources/stacks/HDP/2.5/upgrades/config-upgrade.xml  | 10 ++++++++++
 .../stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml    |  3 +++
 .../resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml     |  1 +
 .../HDP/2.6/services/STORM/configuration/storm-site.xml   |  6 +++---
 .../resources/stacks/HDP/2.6/upgrades/config-upgrade.xml  |  4 ++++
 .../stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml    |  4 ++++
 .../resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml     |  1 +
 13 files changed, 43 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8852f33b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
index ff12150..c4a5b95 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
@@ -660,6 +660,11 @@
                      replace-with="${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}"/>
           </definition>
 
+          <definition xsi:type="configure" id="storm_remove_jmxetric" summary="Removing jmxetric from childopts.">
+            <type>storm-site</type>
+            <regex-replace key="content" find=" -javaagent:.*JVM" replace-with=""/>
+          </definition>
+
         </changes>
       </component>
     </service>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852f33b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
index 4034f4b..907626d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
@@ -671,6 +671,9 @@
           <summary>Update Storm log directory in storm worker log4j</summary>
         </task>
       </execute-stage>
+      <execute-stage service="STORM" component="NIMBUS" title="Removing jmxetric from childopts">
+        <task xsi:type="configure" id="storm_remove_jmxetric"/>
+      </execute-stage>
 
       <!--ATLAS-->
       <execute-stage service="ATLAS" component="ATLAS_SERVER" title="Parameterizing Atlas Log4J Properties">

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852f33b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
index 2e2c9c3..567e6e1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
@@ -1130,6 +1130,7 @@
           <task xsi:type="configure" id="storm_worker_log4j_parameterize" />
           <task xsi:type="configure" id="storm_cluster_log4j_parameterize" />
           <task xsi:type="configure" id="storm_worker_log4j_directory" />
+          <task xsi:type="configure" id="storm_remove_jmxetric"/>
         </pre-upgrade>
 
         <pre-downgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852f33b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
index 5c1f33f..64121a4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
@@ -481,6 +481,10 @@
             <replace key="content" find="${sys:storm.log.dir}/${sys:logfile.name}"
                      replace-with="${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}"/>
           </definition>
+          <definition xsi:type="configure" id="storm_remove_jmxetric" summary="Removing jmxetric from childopts.">
+            <type>storm-site</type>
+            <regex-replace key="content" find=" -javaagent:.*JVM" replace-with=""/>
+          </definition>
         </changes>
       </component>
     </service>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852f33b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
index 4703709..faf5b76 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
@@ -591,6 +591,9 @@
           <summary>Update Storm log directory in storm worker log4j</summary>
         </task>
       </execute-stage>
+      <execute-stage service="STORM" component="NIMBUS" title="Removing jmxetric from childopts">
+        <task xsi:type="configure" id="storm_remove_jmxetric"/>
+      </execute-stage>
 
       <!-- KAFKA -->
       <execute-stage service="KAFKA" component="KAFKA_BROKER" title="Apply config changes for Kafka Broker">

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852f33b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
index 35187c7..572a259 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
@@ -1092,6 +1092,7 @@
           <task xsi:type="configure" id="storm_worker_log4j_parameterize" />
           <task xsi:type="configure" id="storm_cluster_log4j_parameterize" />
           <task xsi:type="configure" id="storm_worker_log4j_directory" />
+          <task xsi:type="configure" id="storm_remove_jmxetric"/>
         </pre-upgrade>
 
         <pre-downgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852f33b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
index cf42e93..d138d60 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
@@ -63,6 +63,16 @@
             <regex-replace key="content" find="A1&quot; immediateFlush=&quot;false&quot;&#xA;                 fileName=&quot;\$\{sys:storm.log.dir}/\$\{sys:logfile.name}&quot;&#xA;                 filePattern=&quot;\$\{sys:storm.log.dir}/\$\{sys:logfile.name}.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;\$\{pattern}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;(?:[0-9]+) MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;([0-9]+)"
                                          replace-with="A1&quot; immediateFlush=&quot;false&quot;&#xA;                 fileName=&quot;${sys:storm.log.dir}/${sys:logfile.name}&quot;&#xA;                 filePattern=&quot;${sys:storm.log.dir}/${sys:logfile.name}.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;${pattern}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;{{storm_a1_maxfilesize}} MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;{{storm_a1_maxbackupindex}}"/>
           </definition>
+          <definition xsi:type="configure" id="storm_nimbus_autocred_config" summary="Update Storm's Nimbus AutoCred config">
+            <type>storm-site</type>
+            <set key="nimbus.autocredential.plugins.classes" value="['org.apache.storm.hdfs.security.AutoHDFS', 'org.apache.storm.hbase.security.AutoHBase', 'org.apache.storm.hive.security.AutoHive']" if-type="streamline-common" if-key="authorizer.class.name" if-key-state="present"/>
+            <set key="nimbus.credential.renewers.classes" value="['org.apache.storm.hdfs.security.AutoHDFS', 'org.apache.storm.hbase.security.AutoHBase', 'org.apache.storm.hive.security.AutoHive']" if-type="streamline-common" if-key="authorizer.class.name" if-key-state="present"/>
+            <set key="nimbus.credential.renewers.freq.secs" value="82800" if-type="streamline-common" if-key="authorizer.class.name" if-key-state="present"/>
+          </definition>
+          <definition xsi:type="configure" id="storm_remove_jmxetric" summary="Removing jmxetric from childopts.">
+            <type>storm-site</type>
+            <regex-replace key="content" find=" -javaagent:.*JVM" replace-with=""/>
+          </definition>
         </changes>
       </component>
     </service>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852f33b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
index cd69a9c..8012c90 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
@@ -526,6 +526,9 @@
           <summary>Updating the Storm cluster Log4J properties to include parameterizations</summary>
         </task>
       </execute-stage>
+      <execute-stage service="STORM" component="NIMBUS" title="Removing jmxetric from childopts">
+        <task xsi:type="configure" id="storm_remove_jmxetric"/>
+      </execute-stage>
 
       <!-- PIG -->
       <execute-stage service="PIG" component="PIG" title="Apply config changes for Pig">

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852f33b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
index a6d3f29..7c43948 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
@@ -1050,6 +1050,7 @@
         <pre-upgrade>
           <task xsi:type="configure" id="storm_worker_log4j_parameterize" />
           <task xsi:type="configure" id="storm_cluster_log4j_parameterize" />
+          <task xsi:type="configure" id="storm_remove_jmxetric"/>
         </pre-upgrade>
         <pre-downgrade/>
         <upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852f33b/ambari-server/src/main/resources/stacks/HDP/2.6/services/STORM/configuration/storm-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/STORM/configuration/storm-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/STORM/configuration/storm-site.xml
index 902fdc9..74262b6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/STORM/configuration/storm-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/STORM/configuration/storm-site.xml
@@ -29,7 +29,7 @@
   </property>
   <property>
     <name>nimbus.childopts</name>
-    <value>-Xmx1024m _JAAS_PLACEHOLDER -javaagent:/usr/hdp/current/storm-nimbus/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8649,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm-nimbus/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Nimbus_JVM</value>
+    <value>-Xmx1024m _JAAS_PLACEHOLDER</value>
     <description>This parameter is used by the storm-deploy project to configure the jvm options for the nimbus daemon.</description>
     <value-attributes>
       <overridable>false</overridable>
@@ -38,7 +38,7 @@
   </property>
   <property>
     <name>worker.childopts</name>
-    <value>-Xmx768m _JAAS_PLACEHOLDER -javaagent:/usr/hdp/current/storm-client/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm-client/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Worker_%ID%_JVM</value>
+    <value>-Xmx768m _JAAS_PLACEHOLDER</value>
     <description>The jvm opts provided to workers launched by this supervisor. All \"%ID%\" substrings are replaced with an identifier for this worker.</description>
     <value-attributes>
       <type>multiLine</type>
@@ -47,7 +47,7 @@
   </property>
   <property>
     <name>supervisor.childopts</name>
-    <value>-Xmx256m _JAAS_PLACEHOLDER -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port={{jmxremote_port}} -javaagent:/usr/hdp/current/storm-supervisor/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm-supervisor/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Supervisor_JVM</value>
+    <value>-Xmx256m _JAAS_PLACEHOLDER -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port={{jmxremote_port}}</value>
     <description>This parameter is used by the storm-deploy project to configure the jvm options for the supervisor daemon.</description>
     <value-attributes>
       <overridable>false</overridable>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852f33b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
index 91044d8..647bdeb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
@@ -132,6 +132,10 @@
             <set key="ranger.plugin.storm.ambari.cluster.name" value="{{cluster_name}}"
               if-type="ranger-storm-plugin-properties" if-key="ranger-storm-plugin-enabled" if-key-state="present"/>
           </definition>
+          <definition xsi:type="configure" id="storm_remove_jmxetric" summary="Removing jmxetric from childopts.">
+            <type>storm-site</type>
+            <regex-replace key="content" find=" -javaagent:.*JVM" replace-with=""/>
+          </definition>
         </changes>
       </component>
     </service>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852f33b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
index 0b5e8c1..c9e90a9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
@@ -333,6 +333,10 @@
         <task xsi:type="configure" id="hdp_2_6_maint_ranger_storm_plugin_cluster_name"/>
       </execute-stage>
 
+      <execute-stage service="STORM" component="NIMBUS" title="Removing jmxetric from childopts">
+        <task xsi:type="configure" id="storm_remove_jmxetric"/>
+      </execute-stage>
+
       <!-- YARN -->
       <execute-stage service="YARN" component="RESOURCEMANAGER" title="Apply config changes for Ranger Yarn plugin">
         <task xsi:type="configure" id="hdp_2_6_maint_ranger_yarn_plugin_cluster_name"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852f33b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
index 00597c6..176143c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
@@ -979,6 +979,7 @@
       <component name="NIMBUS">
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_6_maint_ranger_storm_plugin_cluster_name"/>
+          <task xsi:type="configure" id="storm_remove_jmxetric"/>
         </pre-upgrade>
         <pre-downgrade/> <!--  no-op to prevent config changes on downgrade -->
         <upgrade>


[03/50] [abbrv] ambari git commit: AMBARI-22213. "ambari-server upgrade" failed on db schema [Upgrade] (dlysnichenko)

Posted by ao...@apache.org.
AMBARI-22213. "ambari-server upgrade" failed on db schema [Upgrade] (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b86f53fb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b86f53fb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b86f53fb

Branch: refs/heads/branch-3.0-perf
Commit: b86f53fbe1c940811c3e49d658aad870d1dd53da
Parents: a8caac3
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Thu Oct 12 13:57:24 2017 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Thu Oct 12 13:58:23 2017 +0300

----------------------------------------------------------------------
 .../apache/ambari/server/orm/DBAccessorImpl.java    | 16 ++++++++++++----
 1 file changed, 12 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b86f53fb/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
index 26670fc..faa58f2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
@@ -1551,8 +1551,12 @@ public class DBAccessorImpl implements DBAccessor {
    */
   @Override
   public void clearTable(String tableName) throws SQLException {
-    String sqlQuery = "DELETE FROM " + convertObjectName(tableName);
-    executeQuery(sqlQuery);
+    if (tableExists(tableName)){
+      String sqlQuery = "DELETE FROM " + convertObjectName(tableName);
+      executeQuery(sqlQuery);
+    } else {
+      LOG.warn("{} table doesn't exists, skipping", tableName);
+    }
   }
 
   /**
@@ -1564,7 +1568,11 @@ public class DBAccessorImpl implements DBAccessor {
    */
   @Override
   public void clearTableColumn(String tableName, String columnName, Object value) throws SQLException {
-    String sqlQuery = String.format("UPDATE %s SET %s = ?", convertObjectName(tableName), convertObjectName(columnName));
-    executePreparedUpdate(sqlQuery, value);
+    if (tableExists(tableName)){
+      String sqlQuery = String.format("UPDATE %s SET %s = ?", convertObjectName(tableName), convertObjectName(columnName));
+      executePreparedUpdate(sqlQuery, value);
+    } else {
+      LOG.warn("{} table doesn't exists, skipping", tableName);
+    }
   }
 }


[19/50] [abbrv] ambari git commit: AMBARI-22243. Apache Hive 2 LLAP cluster doesn't have the metastore warehouse directory. (stoader)

Posted by ao...@apache.org.
AMBARI-22243. Apache Hive 2 LLAP cluster doesn't have the metastore warehouse directory. (stoader)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/43fb5976
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/43fb5976
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/43fb5976

Branch: refs/heads/branch-3.0-perf
Commit: 43fb59761595431b2a8832bafe1bfcb2fa85a1fa
Parents: 8c017c1
Author: Toader, Sebastian <st...@hortonworks.com>
Authored: Sat Oct 14 07:45:22 2017 +0200
Committer: Toader, Sebastian <st...@hortonworks.com>
Committed: Sun Oct 15 07:43:51 2017 +0200

----------------------------------------------------------------------
 .../0.12.0.2.0/package/scripts/hive_interactive.py  | 13 +++++++++++++
 .../2.1.0.3.0/package/scripts/hive_interactive.py   | 13 +++++++++++++
 .../python/stacks/2.5/HIVE/test_hive_server_int.py  | 16 ++++++++++++++++
 3 files changed, 42 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/43fb5976/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
index 2ed3e3a..89060be 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
@@ -62,6 +62,19 @@ def hive_interactive(name=None):
   import params
   MB_TO_BYTES = 1048576
 
+  # if warehouse directory is in DFS
+  if not params.whs_dir_protocol or params.whs_dir_protocol == urlparse(params.default_fs).scheme:
+    # Create Hive Metastore Warehouse Dir
+    params.HdfsResource(params.hive_apps_whs_dir,
+                        type="directory",
+                        action="create_on_execute",
+                        owner=params.hive_user,
+                        group=params.user_group,
+                        mode=params.hive_apps_whs_mode
+                        )
+  else:
+    Logger.info(format("Not creating warehouse directory '{hive_apps_whs_dir}', as the location is not in DFS."))
+
   # Create Hive User Dir
   params.HdfsResource(params.hive_hdfs_user_dir,
                       type="directory",

http://git-wip-us.apache.org/repos/asf/ambari/blob/43fb5976/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_interactive.py
index 2ed3e3a..89060be 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_interactive.py
@@ -62,6 +62,19 @@ def hive_interactive(name=None):
   import params
   MB_TO_BYTES = 1048576
 
+  # if warehouse directory is in DFS
+  if not params.whs_dir_protocol or params.whs_dir_protocol == urlparse(params.default_fs).scheme:
+    # Create Hive Metastore Warehouse Dir
+    params.HdfsResource(params.hive_apps_whs_dir,
+                        type="directory",
+                        action="create_on_execute",
+                        owner=params.hive_user,
+                        group=params.user_group,
+                        mode=params.hive_apps_whs_mode
+                        )
+  else:
+    Logger.info(format("Not creating warehouse directory '{hive_apps_whs_dir}', as the location is not in DFS."))
+
   # Create Hive User Dir
   params.HdfsResource(params.hive_hdfs_user_dir,
                       type="directory",

http://git-wip-us.apache.org/repos/asf/ambari/blob/43fb5976/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py b/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
index 3dc78ab..4951c7e 100644
--- a/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
+++ b/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
@@ -418,6 +418,22 @@ class TestHiveServerInteractive(RMFTestCase):
 
   def assert_configure_default(self, no_tmp=False, default_fs_default=u'hdfs://c6401.ambari.apache.org:8020', with_cs_enabled=False):
 
+    self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
+                              immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
+                              security_enabled = False,
+                              hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              keytab = UnknownConfigurationMock(),
+                              kinit_path_local = '/usr/bin/kinit',
+                              user = 'hdfs',
+                              dfs_type = '',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              type = 'directory',
+                              action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
+                              mode = 0777,
+    )
+
     self.assertResourceCalled('HdfsResource', '/user/hive',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,


[37/50] [abbrv] ambari git commit: AMBARI-22258. Use correct property attribute setter fn() while setting 'tez.runtime.io.sort.mb' maximum value.

Posted by ao...@apache.org.
AMBARI-22258. Use correct property attribute setter fn() while setting 'tez.runtime.io.sort.mb' maximum value.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b9f26708
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b9f26708
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b9f26708

Branch: refs/heads/branch-3.0-perf
Commit: b9f26708dd19c444918ea9b21150f66236fcdf2d
Parents: c924ebd
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Tue Oct 17 13:03:31 2017 -0700
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Tue Oct 17 13:04:52 2017 -0700

----------------------------------------------------------------------
 .../src/main/resources/stacks/HDP/2.5/services/stack_advisor.py | 3 ++-
 .../src/test/python/stacks/2.5/common/test_stack_advisor.py     | 5 ++++-
 2 files changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b9f26708/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index 92ce9b9..b6f2478 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -860,6 +860,7 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
     putHiveInteractiveEnvProperty = self.putProperty(configurations, "hive-interactive-env", services)
     putHiveInteractiveEnvPropertyAttribute = self.putPropertyAttribute(configurations, "hive-interactive-env")
     putTezInteractiveSiteProperty = self.putProperty(configurations, "tez-interactive-site", services)
+    putTezInteractiveSitePropertyAttribute = self.putPropertyAttribute(configurations, "tez-interactive-site")
     llap_daemon_selected_queue_name = None
     selected_queue_is_ambari_managed_llap = None  # Queue named 'llap' at root level is Ambari managed.
     llap_selected_queue_am_percent = None
@@ -1331,7 +1332,7 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
     putTezInteractiveSiteProperty('tez.runtime.io.sort.mb', tez_runtime_io_sort_mb)
     if "tez-site" in services["configurations"] and "tez.runtime.sorter.class" in services["configurations"]["tez-site"]["properties"]:
       if services["configurations"]["tez-site"]["properties"]["tez.runtime.sorter.class"] == "LEGACY":
-        putTezInteractiveSiteProperty("tez.runtime.io.sort.mb", "maximum", 1800)
+        putTezInteractiveSitePropertyAttribute("tez.runtime.io.sort.mb", "maximum", 1800)
 
     putTezInteractiveSiteProperty('tez.runtime.unordered.output.buffer.size-mb', tez_runtime_unordered_output_buffer_size)
     putHiveInteractiveSiteProperty('hive.auto.convert.join.noconditionaltask.size', hive_auto_convert_join_noconditionaltask_size)

http://git-wip-us.apache.org/repos/asf/ambari/blob/b9f26708/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
index cf462de..407e78d 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
@@ -4446,7 +4446,8 @@ class TestHDP25StackAdvisor(TestCase):
           },
         "tez-site": {
           "properties": {
-            "tez.am.resource.memory.mb": "1024"
+            "tez.am.resource.memory.mb": "1024",
+            "tez.runtime.sorter.class": "LEGACY"
           }
         },
       }
@@ -4481,6 +4482,8 @@ class TestHDP25StackAdvisor(TestCase):
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.io.memory.size'], '186368')
     self.assertEqual(configurations['hive-interactive-env']['properties']['llap_heap_size'], '9830')
+    self.assertEqual(configurations['tez-interactive-site']['properties']['tez.runtime.io.sort.mb'], '1092')
+    self.assertEquals(configurations['tez-interactive-site']['property_attributes']['tez.runtime.io.sort.mb'], {'maximum': '1800'})
 
 
 


[29/50] [abbrv] ambari git commit: AMBARI-22238 : Maven cleanup of ambari utility, logsearch and ambari metrics modules. (Commit 2) (avijayan)

Posted by ao...@apache.org.
AMBARI-22238 : Maven cleanup of ambari utility, logsearch and ambari metrics modules. (Commit 2) (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/75102dc4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/75102dc4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/75102dc4

Branch: refs/heads/branch-3.0-perf
Commit: 75102dc4c8eb08772568cfd085682fe30f8289c6
Parents: 4a22350
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Mon Oct 16 15:58:47 2017 -0700
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Mon Oct 16 15:58:47 2017 -0700

----------------------------------------------------------------------
 ambari-infra/ambari-infra-assembly/pom.xml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/75102dc4/ambari-infra/ambari-infra-assembly/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-assembly/pom.xml b/ambari-infra/ambari-infra-assembly/pom.xml
index 2b4959b..bf0e679 100644
--- a/ambari-infra/ambari-infra-assembly/pom.xml
+++ b/ambari-infra/ambari-infra-assembly/pom.xml
@@ -120,9 +120,9 @@
                       <sources>
                         <source>
                           <location>${solr.client.dir}/target/package</location>
-                          <exclude>
+                          <excludes>
                             <exclude>libs/checkstyle*.jar</exclude>
-                          </exclude>
+                          </excludes>
                         </source>
                       </sources>
                     </mapping>


[23/50] [abbrv] ambari git commit: AMBARI-22232 : ADDENDUM Need to add a new property to support proxy users for Atlas service (Vishal Suvagia via mugdha).

Posted by ao...@apache.org.
AMBARI-22232 : ADDENDUM Need to add a new property to support proxy users for Atlas service (Vishal Suvagia via mugdha).


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d4458daa
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d4458daa
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d4458daa

Branch: refs/heads/branch-3.0-perf
Commit: d4458daa9bf5209eb2f86409185221cdbd749b13
Parents: e219186
Author: Vishal Suvagia <vi...@yahoo.com>
Authored: Mon Oct 16 15:16:17 2017 +0530
Committer: Mugdha Varadkar <mu...@apache.org>
Committed: Mon Oct 16 15:35:35 2017 +0530

----------------------------------------------------------------------
 .../upgrades/AtlasProxyUserConfigCalculation.java  | 17 +++++++++++++++++
 .../AtlasProxyUserConfigCalculationTest.java       | 17 +++++++++++++++++
 2 files changed, 34 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d4458daa/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculation.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculation.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculation.java
index 85fb200..40d64b5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculation.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculation.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.ambari.server.serveraction.upgrades;
 
 import org.apache.ambari.server.AmbariException;

http://git-wip-us.apache.org/repos/asf/ambari/blob/d4458daa/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculationTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculationTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculationTest.java
index 3f8bca9..33ec7f3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculationTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculationTest.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.ambari.server.serveraction.upgrades;
 
 import com.google.inject.Injector;


[41/50] [abbrv] ambari git commit: AMBARI-22265. Mahout service check failure after patch upgrade (ncole)

Posted by ao...@apache.org.
AMBARI-22265. Mahout service check failure after patch upgrade (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/29d11392
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/29d11392
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/29d11392

Branch: refs/heads/branch-3.0-perf
Commit: 29d1139235ddfa9e21909aa188ef2dcb30014bfb
Parents: 84342f6
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Oct 18 13:34:43 2017 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Wed Oct 18 13:34:43 2017 -0400

----------------------------------------------------------------------
 .../resources/stacks/HDP/2.0.6/properties/stack_packages.json     | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/29d11392/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
index b8655d7..20b12a9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
@@ -1177,7 +1177,8 @@
     },
     "upgrade-dependencies" : {
       "YARN": ["TEZ"],
-      "TEZ": ["YARN"]
+      "TEZ": ["YARN"],
+      "MAHOUT": ["MAPREDUCE2"]
     }    
   }
 }


[02/50] [abbrv] ambari git commit: AMBARI-22131 Move resources/stacks/HDP/3.0/widgets.json to resources/widgets.json (additional patch) (dsen)

Posted by ao...@apache.org.
AMBARI-22131 Move resources/stacks/HDP/3.0/widgets.json to resources/widgets.json (additional patch) (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a8caac37
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a8caac37
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a8caac37

Branch: refs/heads/branch-3.0-perf
Commit: a8caac375a66911ffb13f38093731158238177ec
Parents: c3f6ba7
Author: Dmytro Sen <ds...@apache.org>
Authored: Thu Oct 12 13:54:07 2017 +0300
Committer: Dmytro Sen <ds...@apache.org>
Committed: Thu Oct 12 13:54:07 2017 +0300

----------------------------------------------------------------------
 ambari-server/src/main/assemblies/server.xml | 5 +++++
 1 file changed, 5 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a8caac37/ambari-server/src/main/assemblies/server.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/assemblies/server.xml b/ambari-server/src/main/assemblies/server.xml
index 37283fa..cbf11c5 100644
--- a/ambari-server/src/main/assemblies/server.xml
+++ b/ambari-server/src/main/assemblies/server.xml
@@ -397,6 +397,11 @@
       <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
     </file>
     <file>
+      <fileMode>755</fileMode>
+      <source>src/main/resources/widgets.json</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
       <fileMode>644</fileMode>
       <source>src/main/resources/slider_resources/README.txt</source>
       <outputDirectory>/var/lib/ambari-server/resources/apps</outputDirectory>


[20/50] [abbrv] ambari git commit: AMBARI-22238 : Maven cleanup of ambari utility, logsearch and ambari metrics modules. (avijayan)

Posted by ao...@apache.org.
AMBARI-22238 : Maven cleanup of ambari utility, logsearch and ambari metrics modules. (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/499fec3c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/499fec3c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/499fec3c

Branch: refs/heads/branch-3.0-perf
Commit: 499fec3c1ec7ddd18fae3a415da7e4d5de82d63b
Parents: 43fb597
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Sun Oct 15 09:41:13 2017 -0700
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Sun Oct 15 09:41:13 2017 -0700

----------------------------------------------------------------------
 ambari-infra/ambari-infra-assembly/pom.xml            |  4 ++++
 ambari-logsearch/ambari-logsearch-assembly/pom.xml    |  4 +++-
 ambari-logsearch/ambari-logsearch-logfeeder/pom.xml   |  4 ++++
 ambari-logsearch/ambari-logsearch-server/pom.xml      |  4 ++++
 ambari-metrics/ambari-metrics-assembly/pom.xml        |  2 ++
 ambari-metrics/ambari-metrics-common/pom.xml          |  6 ++++++
 ambari-metrics/ambari-metrics-timelineservice/pom.xml | 10 ++++++++++
 ambari-utility/pom.xml                                |  6 ++++++
 8 files changed, 39 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/499fec3c/ambari-infra/ambari-infra-assembly/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-assembly/pom.xml b/ambari-infra/ambari-infra-assembly/pom.xml
index dfb5c01..2b4959b 100644
--- a/ambari-infra/ambari-infra-assembly/pom.xml
+++ b/ambari-infra/ambari-infra-assembly/pom.xml
@@ -120,6 +120,9 @@
                       <sources>
                         <source>
                           <location>${solr.client.dir}/target/package</location>
+                          <exclude>
+                            <exclude>libs/checkstyle*.jar</exclude>
+                          </exclude>
                         </source>
                       </sources>
                     </mapping>
@@ -324,6 +327,7 @@
                         <group>root</group>
                         <prefix>${solr.client.mapping.path}</prefix>
                       </mapper>
+                      <excludes>libs/checkstyle*.jar</excludes>
                     </data>
                   </dataSet>
                 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/499fec3c/ambari-logsearch/ambari-logsearch-assembly/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-assembly/pom.xml b/ambari-logsearch/ambari-logsearch-assembly/pom.xml
index e8a9426..cbc62ce 100644
--- a/ambari-logsearch/ambari-logsearch-assembly/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-assembly/pom.xml
@@ -94,6 +94,7 @@
                             <exclude>classes/user_pass.json</exclude>
                             <exclude>classes/HadoopServiceConfig.json</exclude>
                             <exclude>solr_configsets/**</exclude>
+                            <exclude>libs/checkstyle*.jar</exclude>
                           </excludes>
                         </source>
                       </sources>
@@ -241,7 +242,7 @@
                         <group>root</group>
                       </mapper>
                       <excludes>
-                        classes/log4j.xml,classes/logsearch.properties,classes/user_pass.json,classes/HadoopServiceConfig.json,solr_configsets/**
+                        classes/log4j.xml,classes/logsearch.properties,classes/user_pass.json,classes/HadoopServiceConfig.json,solr_configsets/**,libs/checkstyle*.jar
                       </excludes>
                     </data>
                     <data>
@@ -267,6 +268,7 @@
                         <user>root</user>
                         <group>root</group>
                       </mapper>
+                      <excludes>libs/checkstyle*.jar</excludes>
                     </data>
                   </dataSet>
                 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/499fec3c/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml b/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml
index 091f957..b1b6ece 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml
@@ -163,6 +163,10 @@
           <groupId>org.apache.curator</groupId>
           <artifactId>curator-recipes</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>jdk.tools</groupId>
+          <artifactId>jdk.tools</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/ambari/blob/499fec3c/ambari-logsearch/ambari-logsearch-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/pom.xml b/ambari-logsearch/ambari-logsearch-server/pom.xml
index e90c58d..2ad35f5 100755
--- a/ambari-logsearch/ambari-logsearch-server/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-server/pom.xml
@@ -380,6 +380,10 @@
           <artifactId>guava</artifactId>
           <groupId>com.google.guava</groupId>
         </exclusion>
+        <exclusion>
+          <groupId>jdk.tools</groupId>
+          <artifactId>jdk.tools</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/ambari/blob/499fec3c/ambari-metrics/ambari-metrics-assembly/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-assembly/pom.xml b/ambari-metrics/ambari-metrics-assembly/pom.xml
index d9875ce..9925947 100644
--- a/ambari-metrics/ambari-metrics-assembly/pom.xml
+++ b/ambari-metrics/ambari-metrics-assembly/pom.xml
@@ -245,6 +245,7 @@
                           <excludes>
                             <exclude>*tests.jar</exclude>
                             <exclude>findbugs*.jar</exclude>
+                            <exclude>jdk.tools*.jar</exclude>
                           </excludes>
                         </source>
                         <source>
@@ -265,6 +266,7 @@
                             <exclude>bin/*</exclude>
                             <exclude>lib/*tests.jar</exclude>
                             <exclude>lib/findbugs*.jar</exclude>
+                            <exclude>lib/jdk.tools*.jar</exclude>
                           </excludes>
                         </source>
                       </sources>

http://git-wip-us.apache.org/repos/asf/ambari/blob/499fec3c/ambari-metrics/ambari-metrics-common/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/pom.xml b/ambari-metrics/ambari-metrics-common/pom.xml
index cae9734..aa16ba9 100644
--- a/ambari-metrics/ambari-metrics-common/pom.xml
+++ b/ambari-metrics/ambari-metrics-common/pom.xml
@@ -155,6 +155,12 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-annotations</artifactId>
       <version>2.6.0</version>
+      <exclusions>
+        <exclusion>
+          <groupId>jdk.tools</groupId>
+          <artifactId>jdk.tools</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.codehaus.jackson</groupId>

http://git-wip-us.apache.org/repos/asf/ambari/blob/499fec3c/ambari-metrics/ambari-metrics-timelineservice/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/pom.xml b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
index a5eb572..ca72bc7 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/pom.xml
+++ b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
@@ -326,6 +326,10 @@
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-annotations</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>net.sourceforge.findbugs</groupId>
+          <artifactId>annotations</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
 
@@ -377,6 +381,12 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-annotations</artifactId>
       <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>jdk.tools</groupId>
+          <artifactId>jdk.tools</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>

http://git-wip-us.apache.org/repos/asf/ambari/blob/499fec3c/ambari-utility/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-utility/pom.xml b/ambari-utility/pom.xml
index 1e86d40..4a32872 100644
--- a/ambari-utility/pom.xml
+++ b/ambari-utility/pom.xml
@@ -55,6 +55,12 @@
     <dependency>
       <groupId>com.puppycrawl.tools</groupId>
       <artifactId>checkstyle</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>com.sun</groupId>
+          <artifactId>tools</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>com.puppycrawl.tools</groupId>


[42/50] [abbrv] ambari git commit: Updated team page. (yusaku)

Posted by ao...@apache.org.
Updated team page. (yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c9c96cdb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c9c96cdb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c9c96cdb

Branch: refs/heads/branch-3.0-perf
Commit: c9c96cdb881e59d8a3dfe19e32069adf05fdf445
Parents: 29d1139
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Wed Oct 18 11:08:50 2017 -0700
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Wed Oct 18 11:08:50 2017 -0700

----------------------------------------------------------------------
 docs/pom.xml | 12 ++++++++++++
 1 file changed, 12 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c9c96cdb/docs/pom.xml
----------------------------------------------------------------------
diff --git a/docs/pom.xml b/docs/pom.xml
index a75e093..8af497e 100644
--- a/docs/pom.xml
+++ b/docs/pom.xml
@@ -186,6 +186,18 @@
             </organization>
         </developer>
         <developer>
+            <id>amruta</id>
+            <name>Amruta R Borkar</name>
+            <email>amruta@apache.org</email>
+            <timezone></timezone>
+            <roles>
+                <role>Committer</role>
+            </roles>
+            <organization>
+                IBM
+            </organization>
+        </developer>
+        <developer>
             <id>aonishuk</id>
             <name>Andrew Onischuk</name>
             <email>aonishuk@apache.org</email>


[49/50] [abbrv] ambari git commit: Merge remote-tracking branch 'remotes/origin/trunk' into branch-3.0-perf

Posted by ao...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/state/Host.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/state/Host.java
index 1c2f501,3a78824..bb227cd
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Host.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Host.java
@@@ -414,5 -409,19 +415,21 @@@ public interface Host extends Comparabl
     */
    boolean hasComponentsAdvertisingVersions(StackId stackId) throws AmbariException;
  
 +  void calculateHostStatus(Long clusterId) throws AmbariException;
++
+   /**
+    * Gets whether all host components whose desired repository version matches
+    * the repository version specified have reported the correct version and are
+    * no longer upgrading.
+    *
+    * @param repositoryVersion
+    *          the repository version to check for (not {@code null}).
+    * @return {@code true} if all components on this host have checked in with
+    *         the correct version if their desired repository matches the one
+    *         specified.
+    *
+    * @throws AmbariException
+    */
+   boolean isRepositoryVersionCorrect(RepositoryVersionEntity repositoryVersion)
+       throws AmbariException;
  }

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java
index 8a659af,4af83ef..5c615b2
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java
@@@ -67,11 -67,8 +70,12 @@@ import org.apache.ambari.server.state.H
  import org.apache.ambari.server.state.HostHealthStatus.HealthStatus;
  import org.apache.ambari.server.state.HostState;
  import org.apache.ambari.server.state.MaintenanceState;
 +import org.apache.ambari.server.state.Service;
 +import org.apache.ambari.server.state.ServiceComponent;
 +import org.apache.ambari.server.state.ServiceComponentHost;
  import org.apache.ambari.server.state.StackId;
 +import org.apache.ambari.server.state.State;
+ import org.apache.ambari.server.state.UpgradeState;
  import org.apache.ambari.server.state.configgroup.ConfigGroup;
  import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
  import org.apache.ambari.server.state.fsm.SingleArcTransition;
@@@ -1191,92 -1173,44 +1195,131 @@@ public class HostImpl implements Host 
      return false;
    }
  
 +  public void restoreComponentsStatuses() throws AmbariException {
 +    Long clusterId = null;
 +    for (Cluster cluster : clusters.getClustersForHost(getHostName())) {
 +      clusterId = cluster.getClusterId();
 +      for (ServiceComponentHost sch : cluster.getServiceComponentHosts(getHostName())) {
 +        Service s = cluster.getService(sch.getServiceName());
 +        ServiceComponent sc = s.getServiceComponent(sch.getServiceComponentName());
 +        if (!sc.isClientComponent() &&
 +            sch.getState().equals(State.UNKNOWN)) {
 +          State lastValidState = sch.getLastValidState();
 +          LOG.warn("Restore component state to last valid state for component " + sc.getName() + " on " +
 +              getHostName() + " to " + lastValidState);
 +          sch.setState(lastValidState);
 +        }
 +      }
 +    }
 +    //TODO
 +    if (clusterId != null) {
 +      calculateHostStatus(clusterId);
 +    }
 +  }
 +
 +  @Override
 +  public void calculateHostStatus(Long clusterId) throws AmbariException {
 +    //Use actual component status to compute the host status
 +    int masterCount = 0;
 +    int mastersRunning = 0;
 +    int slaveCount = 0;
 +    int slavesRunning = 0;
 +
 +    StackId stackId;
 +    Cluster cluster = clusters.getCluster(clusterId);
 +    stackId = cluster.getDesiredStackVersion();
 +
 +
 +    List<ServiceComponentHost> scHosts = cluster.getServiceComponentHosts(hostName);
 +    for (ServiceComponentHost scHost : scHosts) {
 +      ComponentInfo componentInfo =
 +          ambariMetaInfo.getComponent(stackId.getStackName(),
 +              stackId.getStackVersion(), scHost.getServiceName(),
 +              scHost.getServiceComponentName());
 +
 +      String status = scHost.getState().name();
 +
 +      String category = componentInfo.getCategory();
 +
 +      if (MaintenanceState.OFF == maintenanceStateHelper.getEffectiveState(scHost, this)) {
 +        if (category.equals("MASTER")) {
 +          ++masterCount;
 +          if (status.equals("STARTED")) {
 +            ++mastersRunning;
 +          }
 +        } else if (category.equals("SLAVE")) {
 +          ++slaveCount;
 +          if (status.equals("STARTED")) {
 +            ++slavesRunning;
 +          }
 +        }
 +      }
 +    }
 +
 +    HostHealthStatus.HealthStatus healthStatus;
 +    if (masterCount == mastersRunning && slaveCount == slavesRunning) {
 +      healthStatus = HostHealthStatus.HealthStatus.HEALTHY;
 +    } else if (masterCount > 0 && mastersRunning < masterCount) {
 +      healthStatus = HostHealthStatus.HealthStatus.UNHEALTHY;
 +    } else {
 +      healthStatus = HostHealthStatus.HealthStatus.ALERT;
 +    }
 +
 +    setStatus(healthStatus.name());
 +  }
 +
 +  @Transactional
 +  public void updateHost(HostRegistrationRequestEvent e) {
 +    importHostInfo(e.hostInfo);
 +    setLastRegistrationTime(e.registrationTime);
 +    //Initialize heartbeat time and timeInState with registration time.
 +    setLastHeartbeatTime(e.registrationTime);
 +    setLastAgentEnv(e.agentEnv);
 +    setTimeInState(e.registrationTime);
 +    setAgentVersion(e.agentVersion);
 +    setPublicHostName(e.publicHostName);
 +    setTimeInState(System.currentTimeMillis());
 +    setState(HostState.INIT);
 +  }
++
+   /**
+    * {@inheritDoc}
+    */
+   @Override
+   public boolean isRepositoryVersionCorrect(RepositoryVersionEntity repositoryVersion)
+       throws AmbariException {
+     HostEntity hostEntity = getHostEntity();
+     Collection<HostComponentStateEntity> hostComponentStates = hostEntity.getHostComponentStateEntities();
+ 
+     // for every host component, if it matches the desired repo and has reported
+     // the correct version then we're good
+     for (HostComponentStateEntity hostComponentState : hostComponentStates) {
+       ServiceComponentDesiredStateEntity desiredComponmentState = hostComponentState.getServiceComponentDesiredStateEntity();
+       RepositoryVersionEntity desiredRepositoryVersion = desiredComponmentState.getDesiredRepositoryVersion();
+ 
+       ComponentInfo componentInfo = ambariMetaInfo.getComponent(
+           desiredRepositoryVersion.getStackName(), desiredRepositoryVersion.getStackVersion(),
+           hostComponentState.getServiceName(), hostComponentState.getComponentName());
+ 
+       // skip components which don't advertise a version
+       if (!componentInfo.isVersionAdvertised()) {
+         continue;
+       }
+ 
+       // we only care about checking the specified repo version for this host
+       if (!repositoryVersion.equals(desiredRepositoryVersion)) {
+         continue;
+       }
+ 
+       String versionAdvertised = hostComponentState.getVersion();
+       if (hostComponentState.getUpgradeState() == UpgradeState.IN_PROGRESS
+           || !StringUtils.equals(versionAdvertised, repositoryVersion.getVersion())) {
+         return false;
+       }
+     }
+ 
+     return true;
+   }
  }
  
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
index 7211dcd,3b8f6da..22bcfc7
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
@@@ -1256,33 -1175,22 +1256,43 @@@ public class ServiceComponentHostImpl i
      r.setActualConfigs(actualConfigs);
      r.setUpgradeState(upgradeState);
  
 -    try {
 -      r.setStaleConfig(helper.isStaleConfigs(this, desiredConfigs, hostComponentDesiredStateEntity));
 -    } catch (Exception e) {
 -      LOG.error("Could not determine stale config", e);
 +    return r;
 +  }
 +
 +  @Override
 +  public ServiceComponentHostResponse convertToResponseStatusOnly(Map<String, DesiredConfig> desiredConfigs,
 +                                                                  boolean collectStaleConfigsStatus) {
 +    String clusterName = serviceComponent.getClusterName();
 +    String serviceName = serviceComponent.getServiceName();
 +    String serviceComponentName = serviceComponent.getName();
 +    String state = getState().toString();
 +
 +    ServiceComponentHostResponse r = new ServiceComponentHostResponse(clusterName, serviceName,
 +        serviceComponentName, null, hostName, null, state, null,
 +        null, null, null, null);
 +
 +    if (collectStaleConfigsStatus) {
 +
 +      try {
 +        HostComponentDesiredStateEntity hostComponentDesiredStateEntity = getDesiredStateEntity();
 +        r.setStaleConfig(helper.isStaleConfigs(this, desiredConfigs, hostComponentDesiredStateEntity));
 +      } catch (Exception e) {
 +        LOG.error("Could not determine stale config", e);
 +      }
 +    } else {
 +      r.setStaleConfig(false);
      }
  
+     try {
+       Cluster cluster = clusters.getCluster(clusterName);
+       ServiceComponent serviceComponent = cluster.getService(serviceName).getServiceComponent(serviceComponentName);
+       ServiceComponentHost sch = serviceComponent.getServiceComponentHost(hostName);
+       String refreshConfigsCommand = helper.getRefreshConfigsCommand(cluster,sch);
+       r.setReloadConfig(refreshConfigsCommand != null);
+     } catch (Exception e) {
+       LOG.error("Could not determine reload config flag", e);
+     }
+ 
      return r;
    }
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
index 0000000,fd1cde6..8430ced
mode 000000,100644..100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
@@@ -1,0 -1,200 +1,200 @@@
+ #!/usr/bin/env python
+ """
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+ 
+     http://www.apache.org/licenses/LICENSE-2.0
+ 
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ 
+ """
+ from ambari_commons import OSCheck
+ from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
+ from resource_management.libraries.functions import conf_select
+ from resource_management.libraries.functions import stack_select
+ from resource_management.libraries.resources.hdfs_resource import HdfsResource
+ from resource_management.libraries.functions import get_kinit_path
+ from resource_management.libraries.script.script import Script
+ from resource_management.libraries.functions import format
+ from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
+ from resource_management.libraries.functions.default import default
+ from ambari_commons.constants import AMBARI_SUDO_BINARY
+ 
+ import status_params
+ 
+ # a map of the Ambari role to the component name
+ # for use with <stack-root>/current/<component>
+ SERVER_ROLE_DIRECTORY_MAP = {
+   'DRUID_BROKER': 'druid-broker',
+   'DRUID_COORDINATOR': 'druid-coordinator',
+   'DRUID_HISTORICAL': 'druid-historical',
+   'DRUID_MIDDLEMANAGER': 'druid-middlemanager',
+   'DRUID_OVERLORD': 'druid-overlord',
+   'DRUID_ROUTER': 'druid-router'
+ }
+ 
+ # server configurations
+ config = Script.get_config()
+ stack_root = Script.get_stack_root()
+ tmp_dir = Script.get_tmp_dir()
+ 
 -stack_name = default("/hostLevelParams/stack_name", None)
++stack_name = default("/clusterLevelParams/stack_name", None)
+ 
+ # stack version
+ stack_version = default("/commandParams/version", None)
+ 
+ # un-formatted stack version
 -stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
++stack_version_unformatted = str(config['clusterLevelParams']['stack_version'])
+ 
+ # default role to coordinator needed for service checks
+ component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "DRUID_COORDINATOR")
+ 
 -hostname = config['hostname']
++hostname = config['agentLevelParams']['hostname']
+ sudo = AMBARI_SUDO_BINARY
+ 
+ # default druid parameters
+ druid_home = format("{stack_root}/current/{component_directory}")
+ druid_conf_dir = format("{stack_root}/current/{component_directory}/conf")
+ 
+ druid_common_conf_dir = druid_conf_dir + "/_common"
+ druid_coordinator_conf_dir = druid_conf_dir + "/coordinator"
+ druid_overlord_conf_dir = druid_conf_dir + "/overlord"
+ druid_broker_conf_dir = druid_conf_dir + "/broker"
+ druid_historical_conf_dir = druid_conf_dir + "/historical"
+ druid_middlemanager_conf_dir = druid_conf_dir + "/middleManager"
+ druid_router_conf_dir = druid_conf_dir + "/router"
+ druid_extensions_dir = druid_home + "/extensions"
+ druid_hadoop_dependencies_dir = druid_home + "/hadoop-dependencies"
+ druid_segment_infoDir = config['configurations']['druid-historical']['druid.segmentCache.infoDir']
+ druid_segment_cache_locations = config['configurations']['druid-historical']['druid.segmentCache.locations']
+ druid_tasks_dir = config['configurations']['druid-middlemanager']['druid.indexer.task.baseTaskDir']
+ druid_user = config['configurations']['druid-env']['druid_user']
+ druid_log_dir = config['configurations']['druid-env']['druid_log_dir']
+ druid_classpath = config['configurations']['druid-env']['druid_classpath']
+ druid_extensions = config['configurations']['druid-common']['druid.extensions.pullList']
+ druid_repo_list = config['configurations']['druid-common']['druid.extensions.repositoryList']
+ druid_extensions_load_list = config['configurations']['druid-common']['druid.extensions.loadList']
+ druid_security_extensions_load_list = config['configurations']['druid-common']['druid.security.extensions.loadList']
+ 
+ 
+ # status params
+ druid_pid_dir = status_params.druid_pid_dir
+ user_group = config['configurations']['cluster-env']['user_group']
 -java8_home = config['hostLevelParams']['java_home']
++java8_home = config['ambariLevelParams']['java_home']
+ druid_env_sh_template = config['configurations']['druid-env']['content']
+ 
+ # log4j params
+ log4j_props = config['configurations']['druid-log4j']['content']
+ druid_log_level = config['configurations']['druid-log4j']['druid_log_level']
+ metamx_log_level = config['configurations']['druid-log4j']['metamx_log_level']
+ root_log_level = config['configurations']['druid-log4j']['root_log_level']
+ 
+ druid_log_maxbackupindex = default('/configurations/druid-logrotate/druid_log_maxbackupindex', 7)
+ druid_log_maxfilesize = default('/configurations/druid-logrotate/druid_log_maxfilesize', 256)
+ logrotate_props = config['configurations']['druid-logrotate']['content']
+ 
+ # Metadata storage
+ metadata_storage_user = config['configurations']['druid-common']['druid.metadata.storage.connector.user']
+ metadata_storage_password = config['configurations']['druid-common']['druid.metadata.storage.connector.password']
+ metadata_storage_db_name = config['configurations']['druid-common']['database_name']
+ metadata_storage_db_name = config['configurations']['druid-common']['database_name']
+ metadata_storage_type = config['configurations']['druid-common']['druid.metadata.storage.type']
+ metadata_storage_url = config['configurations']['druid-common']['druid.metadata.storage.connector.connectURI']
 -jdk_location = config['hostLevelParams']['jdk_location']
++jdk_location = config['ambariLevelParams']['jdk_location']
+ if 'mysql' == metadata_storage_type:
+   jdbc_driver_jar = default("/hostLevelParams/custom_mysql_jdbc_name", None)
+   connector_curl_source = format("{jdk_location}/{jdbc_driver_jar}")
+   connector_download_dir=format("{druid_extensions_dir}/mysql-metadata-storage")
+   downloaded_custom_connector = format("{tmp_dir}/{jdbc_driver_jar}")
+ 
+ check_db_connection_jar_name = "DBConnectionVerification.jar"
+ check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
+ 
+ # HDFS
+ security_enabled = config['configurations']['cluster-env']['security_enabled']
+ hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+ kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
+ hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
+ hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+ hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name', 'missing_principal').replace("_HOST",
+                                                                                                              hostname)
+ hdfs_site = config['configurations']['hdfs-site']
+ default_fs = config['configurations']['core-site']['fs.defaultFS']
+ dfs_type = default("/commandParams/dfs_type", "")
+ hdfs_tmp_dir = config['configurations']['hadoop-env']['hdfs_tmp_dir']
+ 
+ # Kerberos
+ druid_principal_name = default('/configurations/druid-common/druid.hadoop.security.kerberos.principal',
+                                'missing_principal')
+ druid_user_keytab = default('/configurations/druid-common/druid.hadoop.security.kerberos.keytab', 'missing_keytab')
+ 
+ import functools
+ 
+ # create partial functions with common arguments for every HdfsResource call
+ # to create hdfs directory we need to call params.HdfsResource in code
+ HdfsResource = functools.partial(
+   HdfsResource,
+   user=hdfs_user,
+   hdfs_resource_ignore_file="/var/lib/ambari-agent/data/.hdfs_resource_ignore",
+   security_enabled=security_enabled,
+   keytab=hdfs_user_keytab,
+   kinit_path_local=kinit_path_local,
+   hadoop_bin_dir=hadoop_bin_dir,
+   hadoop_conf_dir=hadoop_conf_dir,
+   principal_name=hdfs_principal_name,
+   hdfs_site=hdfs_site,
+   default_fs=default_fs,
+   immutable_paths=get_not_managed_resources(),
+   dfs_type=dfs_type
+ )
+ 
+ # Ambari Metrics
+ metric_emitter_type = "noop"
+ metric_collector_host = ""
+ metric_collector_port = ""
+ metric_collector_protocol = ""
+ metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
+ metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
+ metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
+ 
+ ams_collector_hosts = default("/clusterHostInfo/metrics_collector_hosts", [])
+ has_metric_collector = not len(ams_collector_hosts) == 0
+ 
+ if has_metric_collector:
+     metric_emitter_type = "ambari-metrics"
+     if 'cluster-env' in config['configurations'] and \
+                     'metrics_collector_vip_host' in config['configurations']['cluster-env']:
+         metric_collector_host = config['configurations']['cluster-env']['metrics_collector_vip_host']
+     else:
+         metric_collector_host = ams_collector_hosts[0]
+     if 'cluster-env' in config['configurations'] and \
+                     'metrics_collector_vip_port' in config['configurations']['cluster-env']:
+         metric_collector_port = config['configurations']['cluster-env']['metrics_collector_vip_port']
+     else:
+         metric_collector_web_address = default("/configurations/ams-site/timeline.metrics.service.webapp.address", "localhost:6188")
+         if metric_collector_web_address.find(':') != -1:
+             metric_collector_port = metric_collector_web_address.split(':')[1]
+         else:
+             metric_collector_port = '6188'
+     if default("/configurations/ams-site/timeline.metrics.service.http.policy", "HTTP_ONLY") == "HTTPS_ONLY":
+         metric_collector_protocol = 'https'
+     else:
+         metric_collector_protocol = 'http'
+     pass
+ 
+ # Create current Hadoop Clients  Libs
 -stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
++stack_version_unformatted = str(config['clusterLevelParams']['stack_version'])
+ io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
+ lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower()
+ lzo_packages = get_lzo_packages(stack_version_unformatted)
+ hadoop_lib_home = stack_root + '/' + stack_version + '/hadoop/lib'

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/install_params.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_interactive.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index d60f102,620408b..a5a505f
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@@ -85,9 -87,10 +87,10 @@@ version = default("/commandParams/versi
  stack_supports_ranger_kerberos = check_stack_feature(StackFeature.RANGER_KERBEROS_SUPPORT, version_for_stack_feature_checks)
  stack_supports_ranger_audit_db = check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, version_for_stack_feature_checks)
  
 -hostname = config['hostname']
 +hostname = config['agentLevelParams']['hostname']
  
  # hadoop default parameters
+ hadoop_home = status_params.hadoop_home
  hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
  hadoop_bin = stack_select.get_hadoop_dir("sbin")
  hadoop_bin_dir = stack_select.get_hadoop_dir("bin")

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
index 6a8e99a,6bb528f..b69c531
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
@@@ -19,44 -19,31 +19,32 @@@ limitations under the License
  """
  from resource_management.libraries.script.script import Script
  from resource_management.libraries import functions
 +from resource_management.libraries.functions import conf_select
+ from resource_management.libraries.functions import stack_select
  from resource_management.libraries.functions import format
  from resource_management.libraries.functions.default import default
- from ambari_commons import OSCheck
  
  config = Script.get_config()
  tmp_dir = Script.get_tmp_dir()
  
- if OSCheck.is_windows_family():
-   resourcemanager_win_service_name = 'resourcemanager'
-   nodemanager_win_service_name = 'nodemanager'
-   historyserver_win_service_name = 'historyserver'
-   timelineserver_win_service_name = 'timelineserver'
- 
-   service_map = {
-     'resourcemanager' : resourcemanager_win_service_name,
-     'nodemanager' : nodemanager_win_service_name,
-     'historyserver' : historyserver_win_service_name,
-     'timelineserver' : timelineserver_win_service_name
-   }
- else:
-   mapred_user = config['configurations']['mapred-env']['mapred_user']
-   yarn_user = config['configurations']['yarn-env']['yarn_user']
-   yarn_pid_dir_prefix = config['configurations']['yarn-env']['yarn_pid_dir_prefix']
-   mapred_pid_dir_prefix = config['configurations']['mapred-env']['mapred_pid_dir_prefix']
-   yarn_pid_dir = format("{yarn_pid_dir_prefix}/{yarn_user}")
-   mapred_pid_dir = format("{mapred_pid_dir_prefix}/{mapred_user}")
- 
-   resourcemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-resourcemanager.pid")
-   nodemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-nodemanager.pid")
-   yarn_historyserver_pid_file_old = format("{yarn_pid_dir}/yarn-{yarn_user}-historyserver.pid")
-   yarn_historyserver_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-timelineserver.pid")  # *-historyserver.pid is deprecated
-   mapred_historyserver_pid_file = format("{mapred_pid_dir}/mapred-{mapred_user}-historyserver.pid")
- 
-   hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
- 
-   hostname = config['agentLevelParams']['hostname']
-   kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-   security_enabled = config['configurations']['cluster-env']['security_enabled']
+ mapred_user = config['configurations']['mapred-env']['mapred_user']
+ yarn_user = config['configurations']['yarn-env']['yarn_user']
+ yarn_pid_dir_prefix = config['configurations']['yarn-env']['yarn_pid_dir_prefix']
+ mapred_pid_dir_prefix = config['configurations']['mapred-env']['mapred_pid_dir_prefix']
+ yarn_pid_dir = format("{yarn_pid_dir_prefix}/{yarn_user}")
+ mapred_pid_dir = format("{mapred_pid_dir_prefix}/{mapred_user}")
+ 
+ resourcemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-resourcemanager.pid")
+ nodemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-nodemanager.pid")
+ yarn_historyserver_pid_file_old = format("{yarn_pid_dir}/yarn-{yarn_user}-historyserver.pid")
+ yarn_historyserver_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-timelineserver.pid")  # *-historyserver.pid is deprecated
+ mapred_historyserver_pid_file = format("{mapred_pid_dir}/mapred-{mapred_user}-historyserver.pid")
+ 
+ hadoop_home = stack_select.get_hadoop_dir("home")
 -hadoop_conf_dir = functions.conf_select.get_hadoop_conf_dir()
++hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+ 
 -hostname = config['hostname']
++hostname = config['agentLevelParams']['hostname']
+ kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+ security_enabled = config['configurations']['cluster-env']['security_enabled']
  
 -stack_name = default("/hostLevelParams/stack_name", None)
 +stack_name = default("/clusterLevelParams/stack_name", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
index d28920b,e4dbe2c..9e7a3bb
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
@@@ -65,13 -68,17 +68,17 @@@ tarball_map = default("/configurations/
  config_path = os.path.join(stack_root, "current/hadoop-client/conf")
  config_dir = os.path.realpath(config_path)
  
+ # get the correct version to use for checking stack features
+ version_for_stack_feature_checks = get_stack_feature_version(config)
+ 
  # This is expected to be of the form #.#.#.#
 -stack_version_unformatted = config['hostLevelParams']['stack_version']
 +stack_version_unformatted = config['clusterLevelParams']['stack_version']
  stack_version_formatted_major = format_stack_version(stack_version_unformatted)
  stack_version_formatted = functions.get_stack_version('hadoop-yarn-resourcemanager')
+ major_stack_version = get_major_version(stack_version_formatted_major)
  
- stack_supports_ru = stack_version_formatted_major and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted_major)
- stack_supports_timeline_state_store = stack_version_formatted_major and check_stack_feature(StackFeature.TIMELINE_STATE_STORE, stack_version_formatted_major)
+ stack_supports_ru = check_stack_feature(StackFeature.ROLLING_UPGRADE, version_for_stack_feature_checks)
+ stack_supports_timeline_state_store = check_stack_feature(StackFeature.TIMELINE_STATE_STORE, version_for_stack_feature_checks)
  
  # New Cluster Stack Version that is defined during the RESTART of a Stack Upgrade.
  # It cannot be used during the initial Cluser Install because the version is not yet known.
@@@ -80,12 -87,10 +87,10 @@@ version = default("/commandParams/versi
  stack_supports_ranger_kerberos = check_stack_feature(StackFeature.RANGER_KERBEROS_SUPPORT, version_for_stack_feature_checks)
  stack_supports_ranger_audit_db = check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, version_for_stack_feature_checks)
  
 -hostname = config['hostname']
 +hostname = config['agentLevelParams']['hostname']
  
  # hadoop default parameters
+ hadoop_home = status_params.hadoop_home
  hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
  hadoop_bin = stack_select.get_hadoop_dir("sbin")
  hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
@@@ -365,10 -392,10 +392,10 @@@ cgroups_dir = "/cgroups_test/cpu
  
  # hostname of the active HDFS HA Namenode (only used when HA is enabled)
  dfs_ha_namenode_active = default("/configurations/hadoop-env/dfs_ha_initial_namenode_active", None)
- if dfs_ha_namenode_active is not None: 
+ if dfs_ha_namenode_active is not None:
    namenode_hostname = dfs_ha_namenode_active
  else:
 -  namenode_hostname = config['clusterHostInfo']['namenode_host'][0]
 +  namenode_hostname = config['clusterHostInfo']['namenode_hosts'][0]
  
  ranger_admin_log_dir = default("/configurations/ranger-env/ranger_admin_log_dir","/var/log/ranger/admin")
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/status_params.py
index 846dbdc,d2d1307..ea93840
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/status_params.py
@@@ -26,36 -26,24 +26,24 @@@ from resource_management.libraries.func
  config = Script.get_config()
  tmp_dir = Script.get_tmp_dir()
  
- if OSCheck.is_windows_family():
-   resourcemanager_win_service_name = 'resourcemanager'
-   nodemanager_win_service_name = 'nodemanager'
-   historyserver_win_service_name = 'historyserver'
-   timelineserver_win_service_name = 'timelineserver'
- 
-   service_map = {
-     'resourcemanager' : resourcemanager_win_service_name,
-     'nodemanager' : nodemanager_win_service_name,
-     'historyserver' : historyserver_win_service_name,
-     'timelineserver' : timelineserver_win_service_name
-   }
- else:
-   mapred_user = config['configurations']['mapred-env']['mapred_user']
-   yarn_user = config['configurations']['yarn-env']['yarn_user']
-   yarn_pid_dir_prefix = config['configurations']['yarn-env']['yarn_pid_dir_prefix']
-   mapred_pid_dir_prefix = config['configurations']['mapred-env']['mapred_pid_dir_prefix']
-   yarn_pid_dir = format("{yarn_pid_dir_prefix}/{yarn_user}")
-   mapred_pid_dir = format("{mapred_pid_dir_prefix}/{mapred_user}")
- 
-   resourcemanager_pid_file = format("{yarn_pid_dir}/hadoop-{yarn_user}-resourcemanager.pid")
-   nodemanager_pid_file = format("{yarn_pid_dir}/hadoop-{yarn_user}-nodemanager.pid")
-   yarn_historyserver_pid_file_old = format("{yarn_pid_dir}/hadoop-{yarn_user}-historyserver.pid")
-   yarn_historyserver_pid_file = format("{yarn_pid_dir}/hadoop-{yarn_user}-timelineserver.pid")  # *-historyserver.pid is deprecated
-   mapred_historyserver_pid_file = format("{mapred_pid_dir}/hadoop-{mapred_user}-historyserver.pid")
- 
-   hadoop_conf_dir = functions.conf_select.get_hadoop_conf_dir()
- 
-   hostname = config['agentLevelParams']['hostname']
-   kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-   security_enabled = config['configurations']['cluster-env']['security_enabled']
+ mapred_user = config['configurations']['mapred-env']['mapred_user']
+ yarn_user = config['configurations']['yarn-env']['yarn_user']
+ yarn_pid_dir_prefix = config['configurations']['yarn-env']['yarn_pid_dir_prefix']
+ mapred_pid_dir_prefix = config['configurations']['mapred-env']['mapred_pid_dir_prefix']
+ yarn_pid_dir = format("{yarn_pid_dir_prefix}/{yarn_user}")
+ mapred_pid_dir = format("{mapred_pid_dir_prefix}/{mapred_user}")
+ 
+ resourcemanager_pid_file = format("{yarn_pid_dir}/hadoop-{yarn_user}-resourcemanager.pid")
+ nodemanager_pid_file = format("{yarn_pid_dir}/hadoop-{yarn_user}-nodemanager.pid")
+ yarn_historyserver_pid_file_old = format("{yarn_pid_dir}/hadoop-{yarn_user}-historyserver.pid")
+ yarn_historyserver_pid_file = format("{yarn_pid_dir}/hadoop-{yarn_user}-timelineserver.pid")  # *-historyserver.pid is deprecated
+ mapred_historyserver_pid_file = format("{mapred_pid_dir}/hadoop-{mapred_user}-historyserver.pid")
+ 
+ hadoop_home = stack_select.get_hadoop_dir("home")
+ hadoop_conf_dir = functions.conf_select.get_hadoop_conf_dir()
+ 
 -hostname = config['hostname']
++hostname = config['agentLevelParams']['hostname']
+ kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+ security_enabled = config['configurations']['cluster-env']['security_enabled']
  
 -stack_name = default("/hostLevelParams/stack_name", None)
 +stack_name = default("/clusterLevelParams/stack_name", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
index 0000000,bf9d79b..1775df5
mode 000000,100644..100644
--- a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
@@@ -1,0 -1,108 +1,108 @@@
+ """
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+ 
+     http://www.apache.org/licenses/LICENSE-2.0
+ 
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ 
+ """
+ 
+ import os
+ 
+ from ambari_commons.constants import AMBARI_SUDO_BINARY
+ from resource_management.libraries.script import Script
+ from resource_management.libraries.script.script import get_config_lock_file
+ from resource_management.libraries.functions import default
+ from resource_management.libraries.functions import conf_select
+ from resource_management.libraries.functions import stack_select
+ from resource_management.libraries.functions import format_jvm_option
+ from resource_management.libraries.functions.version import format_stack_version, get_major_version
+ from string import lower
+ 
+ config = Script.get_config()
+ tmp_dir = Script.get_tmp_dir()
+ 
+ dfs_type = default("/commandParams/dfs_type", "")
+ 
+ is_parallel_execution_enabled = int(default("/agentConfigParams/agent/parallel_execution", 0)) == 1
 -host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
++host_sys_prepped = default("/ambariLevelParams/host_sys_prepped", False)
+ 
+ sudo = AMBARI_SUDO_BINARY
+ 
 -stack_version_unformatted = config['hostLevelParams']['stack_version']
++stack_version_unformatted = config['clusterLevelParams']['stack_version']
+ stack_version_formatted = format_stack_version(stack_version_unformatted)
+ major_stack_version = get_major_version(stack_version_formatted)
+ 
+ # service name
+ service_name = config['serviceName']
+ 
+ # logsearch configuration
+ logsearch_logfeeder_conf = "/etc/ambari-logsearch-logfeeder/conf"
+ 
 -agent_cache_dir = config['hostLevelParams']['agentCacheDir']
 -service_package_folder = config['commandParams']['service_package_folder']
++agent_cache_dir = config['agentLevelParams']['agentCacheDir']
++service_package_folder = config['serviceLevelParams']['service_package_folder']
+ logsearch_service_name = service_name.lower().replace("_", "-")
+ logsearch_config_file_name = 'input.config-' + logsearch_service_name + ".json"
+ logsearch_config_file_path = agent_cache_dir + "/" + service_package_folder + "/templates/" + logsearch_config_file_name + ".j2"
+ logsearch_config_file_exists = os.path.isfile(logsearch_config_file_path)
+ 
+ # default hadoop params
+ hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
+ 
+ mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
+ 
+ versioned_stack_root = '/usr/hdp/current'
+ 
+ #security params
+ security_enabled = config['configurations']['cluster-env']['security_enabled']
+ 
+ #java params
 -java_home = config['hostLevelParams']['java_home']
++java_home = config['ambariLevelParams']['java_home']
+ 
+ #hadoop params
+ hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+ hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
+ hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
+ 
+ jsvc_path = "/usr/lib/bigtop-utils"
+ 
+ hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+ namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
+ namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
+ namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
+ namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m")
+ namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m")
+ 
+ jtnode_opt_newsize = "200m"
+ jtnode_opt_maxnewsize = "200m"
+ jtnode_heapsize =  "1024m"
+ ttnode_heapsize = "1024m"
+ 
+ dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
+ mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+ mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+ 
+ #users and groups
+ hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+ user_group = config['configurations']['cluster-env']['user_group']
+ 
 -namenode_host = default("/clusterHostInfo/namenode_host", [])
++namenode_host = default("/clusterHostInfo/namenode_hosts", [])
+ has_namenode = not len(namenode_host) == 0
+ 
+ if has_namenode or dfs_type == 'HCFS':
+   hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+ 
+ link_configs_lock_file = get_config_lock_file()
+ stack_select_lock_file = os.path.join(tmp_dir, "stack_select_lock_file")
+ 
+ upgrade_suspended = default("/roleParams/upgrade_suspended", False)

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
index 0000000,1a4b074..22543bc
mode 000000,100644..100644
--- a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
@@@ -1,0 -1,132 +1,132 @@@
+ """
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+ 
+     http://www.apache.org/licenses/LICENSE-2.0
+ 
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ 
+ """
+ import os
+ 
+ import ambari_simplejson as json
+ from ambari_jinja2 import Environment as JinjaEnvironment
+ from resource_management.core.logger import Logger
+ from resource_management.core.resources.system import Directory, File
+ from resource_management.core.source import InlineTemplate, Template
+ from resource_management.libraries.functions import conf_select
+ from resource_management.libraries.functions import stack_select
+ from resource_management.libraries.functions.default import default
+ from resource_management.libraries.functions.format import format
+ from resource_management.libraries.functions.fcntl_based_process_lock import FcntlBasedProcessLock
+ from resource_management.libraries.resources.xml_config import XmlConfig
+ from resource_management.libraries.script import Script
+ 
+ 
+ def setup_stack_symlinks(struct_out_file):
+   """
+   Invokes <stack-selector-tool> set all against a calculated fully-qualified, "normalized" version based on a
+   stack version, such as "2.3". This should always be called after a component has been
+   installed to ensure that all HDP pointers are correct. The stack upgrade logic does not
+   interact with this since it's done via a custom command and will not trigger this hook.
+   :return:
+   """
+   import params
+   if params.upgrade_suspended:
+     Logger.warning("Skipping running stack-selector-tool because there is a suspended upgrade")
+     return
+ 
+   if params.host_sys_prepped:
+     Logger.warning("Skipping running stack-selector-tool because this is a sys_prepped host. This may cause symlink pointers not to be created for HDP components installed later on top of an already sys_prepped host")
+     return
+ 
+   # get the packages which the stack-select tool should be used on
+   stack_packages = stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL)
+   if stack_packages is None:
+     return
+ 
+   json_version = load_version(struct_out_file)
+ 
+   if not json_version:
+     Logger.info("There is no advertised version for this component stored in {0}".format(struct_out_file))
+     return
+ 
+   # On parallel command execution this should be executed by a single process at a time.
+   with FcntlBasedProcessLock(params.stack_select_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
+     for package in stack_packages:
+       stack_select.select(package, json_version)
+ 
+ 
+ def setup_config():
+   import params
+   stackversion = params.stack_version_unformatted
+   Logger.info("FS Type: {0}".format(params.dfs_type))
+ 
+   is_hadoop_conf_dir_present = False
+   if hasattr(params, "hadoop_conf_dir") and params.hadoop_conf_dir is not None and os.path.exists(params.hadoop_conf_dir):
+     is_hadoop_conf_dir_present = True
+   else:
+     Logger.warning("Parameter hadoop_conf_dir is missing or directory does not exist. This is expected if this host does not have any Hadoop components.")
+ 
+   if is_hadoop_conf_dir_present and (params.has_namenode or stackversion.find('Gluster') >= 0 or params.dfs_type == 'HCFS'):
+     # create core-site only if the hadoop config diretory exists
+     XmlConfig("core-site.xml",
+               conf_dir=params.hadoop_conf_dir,
+               configurations=params.config['configurations']['core-site'],
 -              configuration_attributes=params.config['configuration_attributes']['core-site'],
++              configuration_attributes=params.config['configurationAttributes']['core-site'],
+               owner=params.hdfs_user,
+               group=params.user_group,
+               only_if=format("ls {hadoop_conf_dir}"))
+ 
+   Directory(params.logsearch_logfeeder_conf,
+             mode=0755,
+             cd_access='a',
+             create_parents=True
+             )
+ 
+   if params.logsearch_config_file_exists:
+     File(format("{logsearch_logfeeder_conf}/" + params.logsearch_config_file_name),
+          content=Template(params.logsearch_config_file_path,extra_imports=[default])
+          )
+   else:
+     Logger.warning('No logsearch configuration exists at ' + params.logsearch_config_file_path)
+ 
+ 
+ def load_version(struct_out_file):
+   """
+   Load version from file.  Made a separate method for testing
+   """
+   try:
+     with open(struct_out_file, 'r') as fp:
+       json_info = json.load(fp)
+ 
+     return json_info['version']
+   except (IOError, KeyError, TypeError):
+     return None
+ 
+ 
+ def link_configs(struct_out_file):
+   """
+   Use the conf_select module to link configuration directories correctly.
+   """
+   import params
+ 
+   json_version = load_version(struct_out_file)
+ 
+   if not json_version:
+     Logger.info("Could not load 'version' from {0}".format(struct_out_file))
+     return
+ 
+   # On parallel command execution this should be executed by a single process at a time.
+   with FcntlBasedProcessLock(params.link_configs_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
+     for package_name, directories in conf_select.get_package_dirs().iteritems():
+       conf_select.convert_conf_directories_to_symlinks(package_name, json_version, directories)

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
index 0000000,20992e2..18f5739
mode 000000,100644..100644
--- a/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
@@@ -1,0 -1,254 +1,254 @@@
+ """
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+ 
+     http://www.apache.org/licenses/LICENSE-2.0
+ 
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ 
+ """
+ 
+ import collections
+ import re
+ import os
+ import ast
+ 
+ import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set.
+ 
+ from resource_management.libraries.script import Script
+ from resource_management.libraries.functions import default
+ from resource_management.libraries.functions import format
+ from resource_management.libraries.functions import conf_select
+ from resource_management.libraries.functions import stack_select
+ from resource_management.libraries.functions import format_jvm_option
+ from resource_management.libraries.functions.is_empty import is_empty
+ from resource_management.libraries.functions.version import format_stack_version
+ from resource_management.libraries.functions.expect import expect
+ from resource_management.libraries.functions import StackFeature
+ from resource_management.libraries.functions.stack_features import check_stack_feature
+ from resource_management.libraries.functions.stack_features import get_stack_feature_version
+ from resource_management.libraries.functions.get_architecture import get_architecture
+ from ambari_commons.constants import AMBARI_SUDO_BINARY
+ 
+ 
+ config = Script.get_config()
+ tmp_dir = Script.get_tmp_dir()
+ 
+ stack_root = Script.get_stack_root()
+ 
+ architecture = get_architecture()
+ 
+ dfs_type = default("/commandParams/dfs_type", "")
+ 
+ artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
 -jdk_name = default("/hostLevelParams/jdk_name", None)
 -java_home = config['hostLevelParams']['java_home']
 -java_version = expect("/hostLevelParams/java_version", int)
 -jdk_location = config['hostLevelParams']['jdk_location']
++jdk_name = default("/ambariLevelParams/jdk_name", None)
++java_home = config['ambariLevelParams']['java_home']
++java_version = expect("/ambariLevelParams/java_version", int)
++jdk_location = config['ambariLevelParams']['jdk_location']
+ 
+ hadoop_custom_extensions_enabled = default("/configurations/core-site/hadoop.custom-extensions.enabled", False)
+ 
+ sudo = AMBARI_SUDO_BINARY
+ 
+ ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
+ 
 -stack_version_unformatted = config['hostLevelParams']['stack_version']
++stack_version_unformatted = config['clusterLevelParams']['stack_version']
+ stack_version_formatted = format_stack_version(stack_version_unformatted)
+ 
+ upgrade_type = Script.get_upgrade_type(default("/commandParams/upgrade_type", ""))
+ version = default("/commandParams/version", None)
+ # Handle upgrade and downgrade
+ if (upgrade_type is not None) and version:
+   stack_version_formatted = format_stack_version(version)
+ ambari_java_home = default("/commandParams/ambari_java_home", None)
+ ambari_jdk_name = default("/commandParams/ambari_jdk_name", None)
+ 
+ security_enabled = config['configurations']['cluster-env']['security_enabled']
+ hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+ 
+ # Some datanode settings
+ dfs_dn_addr = default('/configurations/hdfs-site/dfs.datanode.address', None)
+ dfs_dn_http_addr = default('/configurations/hdfs-site/dfs.datanode.http.address', None)
+ dfs_dn_https_addr = default('/configurations/hdfs-site/dfs.datanode.https.address', None)
+ dfs_http_policy = default('/configurations/hdfs-site/dfs.http.policy', None)
+ secure_dn_ports_are_in_use = False
+ 
+ def get_port(address):
+   """
+   Extracts port from the address like 0.0.0.0:1019
+   """
+   if address is None:
+     return None
+   m = re.search(r'(?:http(?:s)?://)?([\w\d.]*):(\d{1,5})', address)
+   if m is not None:
+     return int(m.group(2))
+   else:
+     return None
+ 
+ def is_secure_port(port):
+   """
+   Returns True if port is root-owned at *nix systems
+   """
+   if port is not None:
+     return port < 1024
+   else:
+     return False
+ 
+ # upgrades would cause these directories to have a version instead of "current"
+ # which would cause a lot of problems when writing out hadoop-env.sh; instead
+ # force the use of "current" in the hook
+ hdfs_user_nofile_limit = default("/configurations/hadoop-env/hdfs_user_nofile_limit", "128000")
+ hadoop_home = stack_select.get_hadoop_dir("home")
+ hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
+ hadoop_lib_home = stack_select.get_hadoop_dir("lib")
+ 
+ hadoop_dir = "/etc/hadoop"
+ hadoop_java_io_tmpdir = os.path.join(tmp_dir, "hadoop_java_io_tmpdir")
+ datanode_max_locked_memory = config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory']
+ is_datanode_max_locked_memory_set = not is_empty(config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory'])
+ 
+ mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
+ 
+ if not security_enabled:
+   hadoop_secure_dn_user = '""'
+ else:
+   dfs_dn_port = get_port(dfs_dn_addr)
+   dfs_dn_http_port = get_port(dfs_dn_http_addr)
+   dfs_dn_https_port = get_port(dfs_dn_https_addr)
+   # We try to avoid inability to start datanode as a plain user due to usage of root-owned ports
+   if dfs_http_policy == "HTTPS_ONLY":
+     secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_https_port)
+   elif dfs_http_policy == "HTTP_AND_HTTPS":
+     secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port) or is_secure_port(dfs_dn_https_port)
+   else:   # params.dfs_http_policy == "HTTP_ONLY" or not defined:
+     secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port)
+   if secure_dn_ports_are_in_use:
+     hadoop_secure_dn_user = hdfs_user
+   else:
+     hadoop_secure_dn_user = '""'
+ 
+ #hadoop params
+ hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+ hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
+ hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
+ 
+ jsvc_path = "/usr/lib/bigtop-utils"
+ 
+ hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+ namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
+ namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
+ namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
+ namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m")
+ namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m")
+ 
+ jtnode_opt_newsize = "200m"
+ jtnode_opt_maxnewsize = "200m"
+ jtnode_heapsize =  "1024m"
+ ttnode_heapsize = "1024m"
+ 
+ dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
+ nfsgateway_heapsize = config['configurations']['hadoop-env']['nfsgateway_heapsize']
+ mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+ mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+ hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
+ 
+ #users and groups
+ hbase_user = config['configurations']['hbase-env']['hbase_user']
+ smoke_user =  config['configurations']['cluster-env']['smokeuser']
+ gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
+ gmond_user = config['configurations']['ganglia-env']["gmond_user"]
+ tez_user = config['configurations']['tez-env']["tez_user"]
+ oozie_user = config['configurations']['oozie-env']["oozie_user"]
+ falcon_user = config['configurations']['falcon-env']["falcon_user"]
+ ranger_user = config['configurations']['ranger-env']["ranger_user"]
+ zeppelin_user = config['configurations']['zeppelin-env']["zeppelin_user"]
+ zeppelin_group = config['configurations']['zeppelin-env']["zeppelin_group"]
+ 
+ user_group = config['configurations']['cluster-env']['user_group']
+ 
 -ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
 -namenode_host = default("/clusterHostInfo/namenode_host", [])
++ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_hosts", [])
++namenode_host = default("/clusterHostInfo/namenode_hosts", [])
+ hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
+ oozie_servers = default("/clusterHostInfo/oozie_server", [])
+ falcon_server_hosts = default("/clusterHostInfo/falcon_server_hosts", [])
+ ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
+ zeppelin_master_hosts = default("/clusterHostInfo/zeppelin_master_hosts", [])
+ 
+ # get the correct version to use for checking stack features
+ version_for_stack_feature_checks = get_stack_feature_version(config)
+ 
+ 
+ has_namenode = not len(namenode_host) == 0
+ has_ganglia_server = not len(ganglia_server_hosts) == 0
+ has_tez = 'tez-site' in config['configurations']
+ has_hbase_masters = not len(hbase_master_hosts) == 0
+ has_oozie_server = not len(oozie_servers) == 0
+ has_falcon_server_hosts = not len(falcon_server_hosts) == 0
+ has_ranger_admin = not len(ranger_admin_hosts) == 0
+ has_zeppelin_master = not len(zeppelin_master_hosts) == 0
+ stack_supports_zk_security = check_stack_feature(StackFeature.SECURE_ZOOKEEPER, version_for_stack_feature_checks)
+ 
+ # HDFS High Availability properties
+ dfs_ha_enabled = False
+ dfs_ha_nameservices = default('/configurations/hdfs-site/dfs.internal.nameservices', None)
+ if dfs_ha_nameservices is None:
+   dfs_ha_nameservices = default('/configurations/hdfs-site/dfs.nameservices', None)
+ dfs_ha_namenode_ids = default(format("/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None)
+ if dfs_ha_namenode_ids:
+   dfs_ha_namemodes_ids_list = dfs_ha_namenode_ids.split(",")
+   dfs_ha_namenode_ids_array_len = len(dfs_ha_namemodes_ids_list)
+   if dfs_ha_namenode_ids_array_len > 1:
+     dfs_ha_enabled = True
+ 
+ if has_namenode or dfs_type == 'HCFS':
+     hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+     hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
+ 
+ hbase_tmp_dir = "/tmp/hbase-hbase"
+ 
+ proxyuser_group = default("/configurations/hadoop-env/proxyuser_group","users")
+ ranger_group = config['configurations']['ranger-env']['ranger_group']
+ dfs_cluster_administrators_group = config['configurations']['hdfs-site']["dfs.cluster.administrators"]
+ 
+ sysprep_skip_create_users_and_groups = default("/configurations/cluster-env/sysprep_skip_create_users_and_groups", False)
+ ignore_groupsusers_create = default("/configurations/cluster-env/ignore_groupsusers_create", False)
+ fetch_nonlocal_groups = config['configurations']['cluster-env']["fetch_nonlocal_groups"]
+ 
+ smoke_user_dirs = format("/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
+ if has_hbase_masters:
+   hbase_user_dirs = format("/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
+ #repo params
 -repo_info = config['hostLevelParams']['repo_info']
++repo_info = config['hostLevelParams']['repoInfo']
+ service_repo_info = default("/hostLevelParams/service_repo_info",None)
+ 
+ user_to_groups_dict = {}
+ 
+ #Append new user-group mapping to the dict
+ try:
 -  user_group_map = ast.literal_eval(config['hostLevelParams']['user_groups'])
++  user_group_map = ast.literal_eval(config['clusterLevelParams']['user_groups'])
+   for key in user_group_map.iterkeys():
+     user_to_groups_dict[key] = user_group_map[key]
+ except ValueError:
+   print('User Group mapping (user_group) is missing in the hostLevelParams')
+ 
+ user_to_gid_dict = collections.defaultdict(lambda:user_group)
+ 
 -user_list = json.loads(config['hostLevelParams']['user_list'])
 -group_list = json.loads(config['hostLevelParams']['group_list'])
 -host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
++user_list = json.loads(config['clusterLevelParams']['user_list'])
++group_list = json.loads(config['clusterLevelParams']['group_list'])
++host_sys_prepped = default("/ambariLevelParams/host_sys_prepped", False)
+ 
+ tez_am_view_acls = config['configurations']['tez-site']["tez.am.view-acls"]
+ override_uid = str(default("/configurations/cluster-env/override_uid", "true")).lower()
+ 
+ # if NN HA on secure clutser, access Zookeper securely
+ if stack_supports_zk_security and dfs_ha_enabled and security_enabled:
+     hadoop_zkfc_opts=format("-Dzookeeper.sasl.client=true -Dzookeeper.sasl.client.username=zookeeper -Djava.security.auth.login.config={hadoop_conf_secure_dir}/hdfs_jaas.conf -Dzookeeper.sasl.clientconfig=Client")

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/params.py
index 0000000,50c5a40..36deb2c
mode 000000,100644..100644
--- a/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/params.py
@@@ -1,0 -1,115 +1,115 @@@
+ """
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+ 
+     http://www.apache.org/licenses/LICENSE-2.0
+ 
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ 
+ """
+ 
+ from ambari_commons.constants import AMBARI_SUDO_BINARY
+ from resource_management.libraries.functions.version import format_stack_version, compare_versions
+ from resource_management.core.system import System
+ from resource_management.libraries.script.script import Script
+ from resource_management.libraries.functions import default, format
+ from resource_management.libraries.functions.expect import expect
+ 
+ config = Script.get_config()
+ tmp_dir = Script.get_tmp_dir()
+ sudo = AMBARI_SUDO_BINARY
+ 
 -stack_version_unformatted = config['hostLevelParams']['stack_version']
 -agent_stack_retry_on_unavailability = config['hostLevelParams']['agent_stack_retry_on_unavailability']
 -agent_stack_retry_count = expect("/hostLevelParams/agent_stack_retry_count", int)
++stack_version_unformatted = config['clusterLevelParams']['stack_version']
++agent_stack_retry_on_unavailability = config['ambariLevelParams']['agent_stack_retry_on_unavailability']
++agent_stack_retry_count = expect("/ambariLevelParams/agent_stack_retry_count", int)
+ stack_version_formatted = format_stack_version(stack_version_unformatted)
+ 
+ #users and groups
+ hbase_user = config['configurations']['hbase-env']['hbase_user']
+ smoke_user =  config['configurations']['cluster-env']['smokeuser']
+ gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
+ gmond_user = config['configurations']['ganglia-env']["gmond_user"]
+ tez_user = config['configurations']['tez-env']["tez_user"]
+ 
+ user_group = config['configurations']['cluster-env']['user_group']
+ proxyuser_group = default("/configurations/hadoop-env/proxyuser_group","users")
+ 
+ hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+ 
+ # repo templates
+ repo_rhel_suse =  config['configurations']['cluster-env']['repo_suse_rhel_template']
+ repo_ubuntu =  config['configurations']['cluster-env']['repo_ubuntu_template']
+ 
+ #hosts
 -hostname = config["hostname"]
++hostname = config['agentLevelParams']['hostname']
+ ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 -rm_host = default("/clusterHostInfo/rm_host", [])
 -slave_hosts = default("/clusterHostInfo/slave_hosts", [])
++rm_host = default("/clusterHostInfo/resourcemanager_hosts", [])
++slave_hosts = default("/clusterHostInfo/datanode_hosts", [])
+ oozie_servers = default("/clusterHostInfo/oozie_server", [])
 -hcat_server_hosts = default("/clusterHostInfo/webhcat_server_host", [])
 -hive_server_host =  default("/clusterHostInfo/hive_server_host", [])
++hcat_server_hosts = default("/clusterHostInfo/webhcat_server_hosts", [])
++hive_server_host =  default("/clusterHostInfo/hive_server_hosts", [])
+ hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
 -hs_host = default("/clusterHostInfo/hs_host", [])
 -jtnode_host = default("/clusterHostInfo/jtnode_host", [])
 -namenode_host = default("/clusterHostInfo/namenode_host", [])
++hs_host = default("/clusterHostInfo/historyserver_hosts", [])
++jtnode_host = default("/clusterHostInfo/jtnode_hosts", [])
++namenode_host = default("/clusterHostInfo/namenode_hosts", [])
+ zk_hosts = default("/clusterHostInfo/zookeeper_hosts", [])
 -ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
++ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_hosts", [])
+ storm_server_hosts = default("/clusterHostInfo/nimbus_hosts", [])
+ falcon_host =  default('/clusterHostInfo/falcon_server_hosts', [])
+ 
+ has_sqoop_client = 'sqoop-env' in config['configurations']
+ has_namenode = not len(namenode_host) == 0
+ has_hs = not len(hs_host) == 0
+ has_resourcemanager = not len(rm_host) == 0
+ has_slaves = not len(slave_hosts) == 0
+ has_oozie_server = not len(oozie_servers)  == 0
+ has_hcat_server_host = not len(hcat_server_hosts)  == 0
+ has_hive_server_host = not len(hive_server_host)  == 0
+ has_hbase_masters = not len(hbase_master_hosts) == 0
+ has_zk_host = not len(zk_hosts) == 0
+ has_ganglia_server = not len(ganglia_server_hosts) == 0
+ has_storm_server = not len(storm_server_hosts) == 0
+ has_falcon_server = not len(falcon_host) == 0
+ has_tez = 'tez-site' in config['configurations']
+ 
+ is_namenode_master = hostname in namenode_host
+ is_jtnode_master = hostname in jtnode_host
+ is_rmnode_master = hostname in rm_host
+ is_hsnode_master = hostname in hs_host
+ is_hbase_master = hostname in hbase_master_hosts
+ is_slave = hostname in slave_hosts
+ if has_ganglia_server:
+   ganglia_server_host = ganglia_server_hosts[0]
+ 
+ hbase_tmp_dir = "/tmp/hbase-hbase"
+ 
+ #security params
+ security_enabled = config['configurations']['cluster-env']['security_enabled']
+ 
+ #java params
 -java_home = config['hostLevelParams']['java_home']
++java_home = config['ambariLevelParams']['java_home']
+ artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
 -jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user
 -jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
 -jce_location = config['hostLevelParams']['jdk_location']
 -jdk_location = config['hostLevelParams']['jdk_location']
++jdk_name = default("/ambariLevelParams/jdk_name", None) # None when jdk is already installed by user
++jce_policy_zip = default("/ambariLevelParams/jce_name", None) # None when jdk is already installed by user
++jce_location = config['ambariLevelParams']['jdk_location']
++jdk_location = config['ambariLevelParams']['jdk_location']
+ ignore_groupsusers_create = default("/configurations/cluster-env/ignore_groupsusers_create", False)
 -host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
++host_sys_prepped = default("/ambariLevelParams/host_sys_prepped", False)
+ 
+ smoke_user_dirs = format("/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
+ if has_hbase_masters:
+   hbase_user_dirs = format("/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
+ #repo params
 -repo_info = config['hostLevelParams']['repo_info']
++repo_info = config['hostLevelParams']['repoInfo']
+ service_repo_info = default("/hostLevelParams/service_repo_info",None)
+ 
+ repo_file = default("/repositoryFile", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/repo_initialization.py
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/repo_initialization.py
index 0000000,9f2b344..d0456ce
mode 000000,100644..100644
--- a/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/repo_initialization.py
+++ b/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/repo_initialization.py
@@@ -1,0 -1,75 +1,73 @@@
+ """
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+ 
+     http://www.apache.org/licenses/LICENSE-2.0
+ 
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ 
+ """
+ 
+ from ambari_commons.os_check import OSCheck
+ from resource_management.libraries.resources.repository import Repository
+ from resource_management.libraries.functions.repository_util import create_repo_files, CommandRepository, UBUNTU_REPO_COMPONENTS_POSTFIX
+ from resource_management.core.logger import Logger
+ import ambari_simplejson as json
+ 
+ 
 -def _alter_repo(action, repo_string, repo_template):
++def _alter_repo(action, repo_dicts, repo_template):
+   """
+   @param action: "delete" or "create"
 -  @param repo_string: e.g. "[{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\",\"osType\":\"centos6\",\"repoId\":\"HDP-2.0._\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\"}]"
++  @param repo_dicts: e.g. "[{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\",\"osType\":\"centos6\",\"repoId\":\"HDP-2.0._\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\"}]"
+   """
 -  repo_dicts = json.loads(repo_string)
 -
+   if not isinstance(repo_dicts, list):
+     repo_dicts = [repo_dicts]
+ 
+   if 0 == len(repo_dicts):
+     Logger.info("Repository list is empty. Ambari may not be managing the repositories.")
+   else:
+     Logger.info("Initializing {0} repositories".format(str(len(repo_dicts))))
+ 
+   for repo in repo_dicts:
+     if not 'baseUrl' in repo:
+       repo['baseUrl'] = None
+     if not 'mirrorsList' in repo:
+       repo['mirrorsList'] = None
+ 
+     ubuntu_components = [ repo['distribution'] if 'distribution' in repo and repo['distribution'] else repo['repoName'] ] \
+                         + [repo['components'].replace(",", " ") if 'components' in repo and repo['components'] else UBUNTU_REPO_COMPONENTS_POSTFIX]
+ 
+     Repository(repo['repoId'],
+                action = action,
+                base_url = repo['baseUrl'],
+                mirror_list = repo['mirrorsList'],
+                repo_file_name = repo['repoName'],
+                repo_template = repo_template,
+                components = ubuntu_components) # ubuntu specific
+ 
+ 
+ def install_repos():
+   import params
+   if params.host_sys_prepped:
+     return
+ 
+   template = params.repo_rhel_suse if OSCheck.is_suse_family() or OSCheck.is_redhat_family() else params.repo_ubuntu
+ 
+   # use this newer way of specifying repositories, if available
+   if params.repo_file is not None:
+     create_repo_files(template, CommandRepository(params.repo_file))
+     return
+ 
+   _alter_repo("create", params.repo_info, template)
+ 
+   if params.service_repo_info:
+     _alter_repo("create", params.service_repo_info, template)


[24/50] [abbrv] ambari git commit: AMBARI-22223. jdbc(spark[2]) URLs not configured correctly for secure clusters (Prabhjyot Singh via Venkata Sairam)

Posted by ao...@apache.org.
AMBARI-22223. jdbc(spark[2]) URLs not configured correctly for secure clusters (Prabhjyot Singh via Venkata Sairam)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2a8ac0de
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2a8ac0de
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2a8ac0de

Branch: refs/heads/branch-3.0-perf
Commit: 2a8ac0ded9a9916dd71b84ac21a2a6165db60284
Parents: d4458da
Author: Venkata Sairam <ve...@gmail.com>
Authored: Mon Oct 16 18:15:58 2017 +0530
Committer: Venkata Sairam <ve...@gmail.com>
Committed: Mon Oct 16 18:15:58 2017 +0530

----------------------------------------------------------------------
 .../ZEPPELIN/0.7.0/package/scripts/master.py         | 12 ++++++++----
 .../ZEPPELIN/0.7.0/package/scripts/params.py         | 15 +++++++++------
 2 files changed, 17 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2a8ac0de/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
index 6a84d79..9d179b8 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
@@ -492,8 +492,10 @@ class Master(Script):
           interpreter['properties']['spark.proxy.user.property'] = 'hive.server2.proxy.user'
           interpreter['properties']['spark.url'] = 'jdbc:hive2://' + \
               params.spark_thrift_server_hosts + ':' + params.spark_hive_thrift_port + '/'
-          if params.spark_hive_principal:
-            interpreter['properties']['spark.url'] += ';principal=' + params.spark_hive_principal
+          if params.hive_principal:
+            interpreter['properties']['spark.url'] += ';principal=' + params.hive_principal
+          if params.hive_transport_mode:
+            interpreter['properties']['spark.url'] += ';transportMode=' + params.hive_transport_mode
           if 'spark.splitQueries' not in interpreter['properties']:
             interpreter['properties']['spark.splitQueries'] = "true"
 
@@ -504,8 +506,10 @@ class Master(Script):
           interpreter['properties']['spark2.proxy.user.property'] = 'hive.server2.proxy.user'
           interpreter['properties']['spark2.url'] = 'jdbc:hive2://' + \
               params.spark2_thrift_server_hosts + ':' + params.spark2_hive_thrift_port + '/'
-          if params.spark_hive_principal:
-            interpreter['properties']['spark2.url'] += ';principal=' + params.spark2_hive_principal
+          if params.hive_principal:
+            interpreter['properties']['spark2.url'] += ';principal=' + params.hive_principal
+          if params.hive_transport_mode:
+            interpreter['properties']['spark2.url'] += ';transportMode=' + params.hive_transport_mode
           if 'spark2.splitQueries' not in interpreter['properties']:
             interpreter['properties']['spark2.splitQueries'] = "true"
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2a8ac0de/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
index e69037c..2290a7f 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
@@ -162,13 +162,19 @@ if 'hive_server_interactive_hosts' in master_configs and len(master_configs['hiv
 spark_thrift_server_hosts = None
 spark_hive_thrift_port = None
 spark_hive_principal = None
+hive_principal = None
+hive_transport_mode = None
+
+if 'hive-site' in config['configurations']:
+  if 'hive.server2.authentication.kerberos.principal' in config['configurations']['hive-site']:
+    hive_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
+  if 'hive.server2.transport.mode' in config['configurations']['hive-site']:
+    hive_transport_mode = config['configurations']['hive-site']['hive.server2.transport.mode']
+
 if 'spark_thriftserver_hosts' in master_configs and len(master_configs['spark_thriftserver_hosts']) != 0:
   spark_thrift_server_hosts = str(master_configs['spark_thriftserver_hosts'][0])
   if config['configurations']['spark-hive-site-override']:
     spark_hive_thrift_port = config['configurations']['spark-hive-site-override']['hive.server2.thrift.port']
-  if config['configurations']['spark-thrift-sparkconf'] and \
-      'spark.sql.hive.hiveserver2.jdbc.url.principal' in config['configurations']['spark-thrift-sparkconf']:
-    spark_hive_principal = config['configurations']['spark-thrift-sparkconf']['spark.sql.hive.hiveserver2.jdbc.url.principal']
 
 spark2_thrift_server_hosts = None
 spark2_hive_thrift_port = None
@@ -177,9 +183,6 @@ if 'spark2_thriftserver_hosts' in master_configs and len(master_configs['spark2_
   spark2_thrift_server_hosts = str(master_configs['spark2_thriftserver_hosts'][0])
   if config['configurations']['spark2-hive-site-override']:
     spark2_hive_thrift_port = config['configurations']['spark2-hive-site-override']['hive.server2.thrift.port']
-  if config['configurations']['spark2-thrift-sparkconf'] and \
-      'spark.sql.hive.hiveserver2.jdbc.url.principal' in config['configurations']['spark2-thrift-sparkconf']:
-    spark2_hive_principal = config['configurations']['spark2-thrift-sparkconf']['spark.sql.hive.hiveserver2.jdbc.url.principal']
 
 
 # detect hbase details if installed


[14/50] [abbrv] ambari git commit: AMBARI-22235. Druid service check failed during EU. (b-slim via nishantmonu51)

Posted by ao...@apache.org.
AMBARI-22235. Druid service check failed during EU. (b-slim via nishantmonu51)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c8c1812b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c8c1812b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c8c1812b

Branch: refs/heads/branch-3.0-perf
Commit: c8c1812b07ba0f64093190519cf829df8ea639d0
Parents: 20faae7
Author: Nishant <ni...@gmail.com>
Authored: Fri Oct 13 22:06:41 2017 +0530
Committer: Nishant <ni...@gmail.com>
Committed: Fri Oct 13 22:06:41 2017 +0530

----------------------------------------------------------------------
 .../stacks/HDP/2.6/services/DRUID/kerberos.json | 45 ++++----------------
 .../HDP/2.6/services/SUPERSET/kerberos.json     | 10 +----
 2 files changed, 11 insertions(+), 44 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c8c1812b/ambari-server/src/main/resources/stacks/HDP/2.6/services/DRUID/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/DRUID/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.6/services/DRUID/kerberos.json
index 198c351..f186e31 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/DRUID/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/DRUID/kerberos.json
@@ -16,7 +16,7 @@
         {
           "name": "druid",
           "principal": {
-            "value": "${druid-env/druid_user}@${realm}",
+            "value": "${druid-env/druid_user}${principal_suffix}@${realm}",
             "type": "user",
             "configuration": "druid-common/druid.hadoop.security.kerberos.principal",
             "local_username": "${druid-env/druid_user}"
@@ -41,49 +41,22 @@
       ],
       "components": [
         {
-          "name": "DRUID_HISTORICAL",
-          "identities": [
-            {
-              "name": "druid_druid_historical_druid",
-              "reference": "/druid"
-            }
-          ]
+          "name": "DRUID_HISTORICAL"
         },
         {
-          "name": "DRUID_BROKER",
-          "identities": [
-            {
-              "name": "druid_druid_broker_druid",
-              "reference": "/druid"
-            }
-          ]
+          "name": "DRUID_BROKER"
         },
         {
-          "name": "DRUID_OVERLORD",
-          "identities": [
-            {
-              "name": "druid_druid_historical_druid",
-              "reference": "/druid"
-            }
-          ]
+          "name": "DRUID_OVERLORD"
         },
         {
-          "name": "DRUID_COORDINATOR",
-          "identities": [
-            {
-              "name": "druid_druid_coordinator_druid",
-              "reference": "/druid"
-            }
-          ]
+          "name": "DRUID_COORDINATOR"
         },
         {
-          "name": "DRUID_MIDDLEMANAGER",
-          "identities": [
-            {
-              "name": "druid_druid_middlemanager_druid",
-              "reference": "/druid"
-            }
-          ]
+          "name": "DRUID_MIDDLEMANAGER"
+        },
+        {
+          "name": "DRUID_ROUTER"
         }
       ],
       "configurations": [

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8c1812b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SUPERSET/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SUPERSET/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SUPERSET/kerberos.json
index 5d5125d..0de6737 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SUPERSET/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SUPERSET/kerberos.json
@@ -6,7 +6,7 @@
         {
           "name": "superset",
           "principal": {
-            "value": "${superset-env/superset_user}@${realm}",
+            "value": "${superset-env/superset_user}${principal_suffix}@${realm}",
             "type": "user",
             "configuration": "superset/KERBEROS_PRINCIPAL",
             "local_username": "${superset-env/superset_user}"
@@ -31,13 +31,7 @@
       ],
       "components": [
         {
-          "name": "SUPERSET",
-          "identities": [
-            {
-              "name": "superset_superset_superset",
-              "reference": "/superset"
-            }
-          ]
+          "name": "SUPERSET"
         }
       ],
       "configurations": [


[48/50] [abbrv] ambari git commit: Merge remote-tracking branch 'remotes/origin/trunk' into branch-3.0-perf

Posted by ao...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
index 0000000,6c26e01..3dca93d
mode 000000,100644..100644
--- a/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
@@@ -1,0 -1,380 +1,371 @@@
+ """
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+ 
+     http://www.apache.org/licenses/LICENSE-2.0
+ 
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ 
+ """
+ 
+ import os
+ 
+ from resource_management.libraries.functions import conf_select
+ from resource_management.libraries.functions import stack_select
+ from resource_management.libraries.functions import default
+ from resource_management.libraries.functions import format_jvm_option
+ from resource_management.libraries.functions import format
+ from resource_management.libraries.functions.version import format_stack_version, compare_versions, get_major_version
+ from ambari_commons.os_check import OSCheck
+ from resource_management.libraries.script.script import Script
+ from resource_management.libraries.functions import get_kinit_path
+ from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
+ from resource_management.libraries.resources.hdfs_resource import HdfsResource
+ from resource_management.libraries.functions.stack_features import check_stack_feature
+ from resource_management.libraries.functions.stack_features import get_stack_feature_version
+ from resource_management.libraries.functions import StackFeature
+ from ambari_commons.constants import AMBARI_SUDO_BINARY
+ 
+ config = Script.get_config()
+ tmp_dir = Script.get_tmp_dir()
+ artifact_dir = tmp_dir + "/AMBARI-artifacts"
+ 
+ version_for_stack_feature_checks = get_stack_feature_version(config)
+ stack_supports_hadoop_custom_extensions = check_stack_feature(StackFeature.HADOOP_CUSTOM_EXTENSIONS, version_for_stack_feature_checks)
+ 
+ sudo = AMBARI_SUDO_BINARY
+ 
+ # Global flag enabling or disabling the sysprep feature
 -host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
++host_sys_prepped = default("/ambariLevelParams/host_sys_prepped", False)
+ 
+ # Whether to skip copying fast-hdfs-resource.jar to /var/lib/ambari-agent/lib/
+ # This is required if tarballs are going to be copied to HDFS, so set to False
+ sysprep_skip_copy_fast_jar_hdfs = host_sys_prepped and default("/configurations/cluster-env/sysprep_skip_copy_fast_jar_hdfs", False)
+ 
+ # Whether to skip setting up the unlimited key JCE policy
+ sysprep_skip_setup_jce = host_sys_prepped and default("/configurations/cluster-env/sysprep_skip_setup_jce", False)
+ 
 -stack_version_unformatted = config['hostLevelParams']['stack_version']
++stack_version_unformatted = config['clusterLevelParams']['stack_version']
+ stack_version_formatted = format_stack_version(stack_version_unformatted)
+ major_stack_version = get_major_version(stack_version_formatted)
+ 
+ dfs_type = default("/commandParams/dfs_type", "")
+ hadoop_conf_dir = "/etc/hadoop/conf"
+ component_list = default("/localComponents", [])
+ 
+ hdfs_tmp_dir = default("/configurations/hadoop-env/hdfs_tmp_dir", "/tmp")
+ 
+ hadoop_metrics2_properties_content = None
+ if 'hadoop-metrics2.properties' in config['configurations']:
+   hadoop_metrics2_properties_content = config['configurations']['hadoop-metrics2.properties']['content']
+ 
+ hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
+ hadoop_lib_home = stack_select.get_hadoop_dir("lib")
+ hadoop_bin = stack_select.get_hadoop_dir("sbin")
+ 
+ mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
+ hadoop_home = stack_select.get_hadoop_dir("home")
+ create_lib_snappy_symlinks = False
+   
+ current_service = config['serviceName']
+ 
+ #security params
+ security_enabled = config['configurations']['cluster-env']['security_enabled']
+ 
 -ambari_server_resources_url = default("/hostLevelParams/jdk_location", None)
++ambari_server_resources_url = default("/ambariLevelParams/jdk_location", None)
+ if ambari_server_resources_url is not None and ambari_server_resources_url.endswith('/'):
+   ambari_server_resources_url = ambari_server_resources_url[:-1]
+ 
+ # Unlimited key JCE policy params
 -jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
 -unlimited_key_jce_required = default("/hostLevelParams/unlimited_key_jce_required", False)
 -jdk_name = default("/hostLevelParams/jdk_name", None)
 -java_home = default("/hostLevelParams/java_home", None)
++jce_policy_zip = default("/ambariLevelParams/jce_name", None) # None when jdk is already installed by user
++unlimited_key_jce_required = default("/componentLevelParams/unlimited_key_jce_required", False)
++jdk_name = default("/ambariLevelParams/jdk_name", None)
++java_home = default("/ambariLevelParams/java_home", None)
+ java_exec = "{0}/bin/java".format(java_home) if java_home is not None else "/bin/java"
+ 
+ #users and groups
+ has_hadoop_env = 'hadoop-env' in config['configurations']
+ mapred_user = config['configurations']['mapred-env']['mapred_user']
+ hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+ yarn_user = config['configurations']['yarn-env']['yarn_user']
+ 
+ user_group = config['configurations']['cluster-env']['user_group']
+ 
+ #hosts
 -hostname = config["hostname"]
++hostname = config['agentLevelParams']['hostname']
+ ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 -rm_host = default("/clusterHostInfo/rm_host", [])
 -slave_hosts = default("/clusterHostInfo/slave_hosts", [])
++rm_host = default("/clusterHostInfo/resourcemanager_hosts", [])
++slave_hosts = default("/clusterHostInfo/datanode_hosts", [])
+ oozie_servers = default("/clusterHostInfo/oozie_server", [])
 -hcat_server_hosts = default("/clusterHostInfo/webhcat_server_host", [])
 -hive_server_host =  default("/clusterHostInfo/hive_server_host", [])
++hcat_server_hosts = default("/clusterHostInfo/webhcat_server_hosts", [])
++hive_server_host =  default("/clusterHostInfo/hive_server_hosts", [])
+ hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
 -hs_host = default("/clusterHostInfo/hs_host", [])
 -jtnode_host = default("/clusterHostInfo/jtnode_host", [])
 -namenode_host = default("/clusterHostInfo/namenode_host", [])
++hs_host = default("/clusterHostInfo/historyserver_hosts", [])
++jtnode_host = default("/clusterHostInfo/jtnode_hosts", [])
++namenode_host = default("/clusterHostInfo/namenode_hosts", [])
+ zk_hosts = default("/clusterHostInfo/zookeeper_hosts", [])
 -ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
++ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_hosts", [])
+ cluster_name = config["clusterName"]
+ set_instanceId = "false"
+ if 'cluster-env' in config['configurations'] and \
+     'metrics_collector_external_hosts' in config['configurations']['cluster-env']:
+   ams_collector_hosts = config['configurations']['cluster-env']['metrics_collector_external_hosts']
+   set_instanceId = "true"
+ else:
+   ams_collector_hosts = ",".join(default("/clusterHostInfo/metrics_collector_hosts", []))
+ 
+ has_namenode = not len(namenode_host) == 0
+ has_resourcemanager = not len(rm_host) == 0
+ has_slaves = not len(slave_hosts) == 0
+ has_oozie_server = not len(oozie_servers) == 0
+ has_hcat_server_host = not len(hcat_server_hosts) == 0
+ has_hive_server_host = not len(hive_server_host) == 0
+ has_hbase_masters = not len(hbase_master_hosts) == 0
+ has_zk_host = not len(zk_hosts) == 0
+ has_ganglia_server = not len(ganglia_server_hosts) == 0
+ has_metric_collector = not len(ams_collector_hosts) == 0
+ 
+ is_namenode_master = hostname in namenode_host
+ is_jtnode_master = hostname in jtnode_host
+ is_rmnode_master = hostname in rm_host
+ is_hsnode_master = hostname in hs_host
+ is_hbase_master = hostname in hbase_master_hosts
+ is_slave = hostname in slave_hosts
+ 
+ if has_ganglia_server:
+   ganglia_server_host = ganglia_server_hosts[0]
+ 
+ metric_collector_port = None
+ if has_metric_collector:
+   if 'cluster-env' in config['configurations'] and \
+       'metrics_collector_external_port' in config['configurations']['cluster-env']:
+     metric_collector_port = config['configurations']['cluster-env']['metrics_collector_external_port']
+   else:
+     metric_collector_web_address = default("/configurations/ams-site/timeline.metrics.service.webapp.address", "0.0.0.0:6188")
+     if metric_collector_web_address.find(':') != -1:
+       metric_collector_port = metric_collector_web_address.split(':')[1]
+     else:
+       metric_collector_port = '6188'
+   if default("/configurations/ams-site/timeline.metrics.service.http.policy", "HTTP_ONLY") == "HTTPS_ONLY":
+     metric_collector_protocol = 'https'
+   else:
+     metric_collector_protocol = 'http'
+   metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
+   metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
+   metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
+ 
+   pass
+ metrics_report_interval = default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60)
+ metrics_collection_period = default("/configurations/ams-site/timeline.metrics.sink.collection.period", 10)
+ 
+ host_in_memory_aggregation = default("/configurations/ams-site/timeline.metrics.host.inmemory.aggregation", True)
+ host_in_memory_aggregation_port = default("/configurations/ams-site/timeline.metrics.host.inmemory.aggregation.port", 61888)
+ 
+ # Cluster Zookeeper quorum
+ zookeeper_quorum = None
+ if has_zk_host:
+   if 'zoo.cfg' in config['configurations'] and 'clientPort' in config['configurations']['zoo.cfg']:
+     zookeeper_clientPort = config['configurations']['zoo.cfg']['clientPort']
+   else:
+     zookeeper_clientPort = '2181'
 -  zookeeper_quorum = (':' + zookeeper_clientPort + ',').join(config['clusterHostInfo']['zookeeper_hosts'])
++  zookeeper_quorum = (':' + zookeeper_clientPort + ',').join(config['clusterHostInfo']['zookeeper_server_hosts'])
+   # last port config
+   zookeeper_quorum += ':' + zookeeper_clientPort
+ 
+ #hadoop params
+ 
+ if has_namenode or dfs_type == 'HCFS':
+   hadoop_tmp_dir = format("/tmp/hadoop-{hdfs_user}")
+   hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+   task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
+ 
+ hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
+ hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+ hbase_tmp_dir = "/tmp/hbase-hbase"
+ #db params
 -server_db_name = config['hostLevelParams']['db_name']
 -db_driver_filename = config['hostLevelParams']['db_driver_filename']
 -oracle_driver_url = config['hostLevelParams']['oracle_jdbc_url']
 -mysql_driver_url = config['hostLevelParams']['mysql_jdbc_url']
+ oracle_driver_symlink_url = format("{ambari_server_resources_url}/oracle-jdbc-driver.jar")
+ mysql_driver_symlink_url = format("{ambari_server_resources_url}/mysql-jdbc-driver.jar")
+ 
 -ambari_db_rca_url = config['hostLevelParams']['ambari_db_rca_url'][0]
 -ambari_db_rca_driver = config['hostLevelParams']['ambari_db_rca_driver'][0]
 -ambari_db_rca_username = config['hostLevelParams']['ambari_db_rca_username'][0]
 -ambari_db_rca_password = config['hostLevelParams']['ambari_db_rca_password'][0]
 -
+ if has_namenode and 'rca_enabled' in config['configurations']['hadoop-env']:
+   rca_enabled =  config['configurations']['hadoop-env']['rca_enabled']
+ else:
+   rca_enabled = False
+ rca_disabled_prefix = "###"
+ if rca_enabled == True:
+   rca_prefix = ""
+ else:
+   rca_prefix = rca_disabled_prefix
+ 
+ #hadoop-env.sh
+ 
+ jsvc_path = "/usr/lib/bigtop-utils"
+ 
+ hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+ namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
+ namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
+ namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
+ namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m")
+ namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m")
+ 
+ jtnode_opt_newsize = "200m"
+ jtnode_opt_maxnewsize = "200m"
+ jtnode_heapsize =  "1024m"
+ ttnode_heapsize = "1024m"
+ 
+ dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
+ mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+ mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+ 
+ #log4j.properties
+ 
+ yarn_log_dir_prefix = default("/configurations/yarn-env/yarn_log_dir_prefix","/var/log/hadoop-yarn")
+ 
+ dfs_hosts = default('/configurations/hdfs-site/dfs.hosts', None)
+ 
+ # Hdfs log4j settings
+ hadoop_log_max_backup_size = default('configurations/hdfs-log4j/hadoop_log_max_backup_size', 256)
+ hadoop_log_number_of_backup_files = default('configurations/hdfs-log4j/hadoop_log_number_of_backup_files', 10)
+ hadoop_security_log_max_backup_size = default('configurations/hdfs-log4j/hadoop_security_log_max_backup_size', 256)
+ hadoop_security_log_number_of_backup_files = default('configurations/hdfs-log4j/hadoop_security_log_number_of_backup_files', 20)
+ 
+ # Yarn log4j settings
+ yarn_rm_summary_log_max_backup_size = default('configurations/yarn-log4j/yarn_rm_summary_log_max_backup_size', 256)
+ yarn_rm_summary_log_number_of_backup_files = default('configurations/yarn-log4j/yarn_rm_summary_log_number_of_backup_files', 20)
+ 
+ #log4j.properties
+ if (('hdfs-log4j' in config['configurations']) and ('content' in config['configurations']['hdfs-log4j'])):
+   log4j_props = config['configurations']['hdfs-log4j']['content']
+   if (('yarn-log4j' in config['configurations']) and ('content' in config['configurations']['yarn-log4j'])):
+     log4j_props += config['configurations']['yarn-log4j']['content']
+ else:
+   log4j_props = None
+ 
+ refresh_topology = False
+ command_params = config["commandParams"] if "commandParams" in config else None
+ if command_params is not None:
+   refresh_topology = bool(command_params["refresh_topology"]) if "refresh_topology" in command_params else False
+ 
+ ambari_java_home = default("/commandParams/ambari_java_home", None)
+ ambari_jdk_name = default("/commandParams/ambari_jdk_name", None)
+ ambari_jce_name = default("/commandParams/ambari_jce_name", None)
+   
+ ambari_libs_dir = "/var/lib/ambari-agent/lib"
+ is_webhdfs_enabled = config['configurations']['hdfs-site']['dfs.webhdfs.enabled']
+ default_fs = config['configurations']['core-site']['fs.defaultFS']
+ 
+ #host info
+ all_hosts = default("/clusterHostInfo/all_hosts", [])
+ all_racks = default("/clusterHostInfo/all_racks", [])
+ all_ipv4_ips = default("/clusterHostInfo/all_ipv4_ips", [])
 -slave_hosts = default("/clusterHostInfo/slave_hosts", [])
++slave_hosts = default("/clusterHostInfo/datanode_hosts", [])
+ 
+ #topology files
+ net_topology_script_file_path = "/etc/hadoop/conf/topology_script.py"
+ net_topology_script_dir = os.path.dirname(net_topology_script_file_path)
+ net_topology_mapping_data_file_name = 'topology_mappings.data'
+ net_topology_mapping_data_file_path = os.path.join(net_topology_script_dir, net_topology_mapping_data_file_name)
+ 
+ #Added logic to create /tmp and /user directory for HCFS stack.  
+ has_core_site = 'core-site' in config['configurations']
+ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
+ kinit_path_local = get_kinit_path()
 -stack_version_unformatted = config['hostLevelParams']['stack_version']
++stack_version_unformatted = config['clusterLevelParams']['stack_version']
+ stack_version_formatted = format_stack_version(stack_version_unformatted)
+ hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
+ hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name', None)
+ hdfs_site = config['configurations']['hdfs-site']
+ smoke_user =  config['configurations']['cluster-env']['smokeuser']
+ smoke_hdfs_user_dir = format("/user/{smoke_user}")
+ smoke_hdfs_user_mode = 0770
+ 
+ 
+ ##### Namenode RPC ports - metrics config section start #####
+ 
+ # Figure out the rpc ports for current namenode
+ nn_rpc_client_port = None
+ nn_rpc_dn_port = None
+ nn_rpc_healthcheck_port = None
+ 
+ namenode_id = None
+ namenode_rpc = None
+ 
+ dfs_ha_enabled = False
+ dfs_ha_nameservices = default('/configurations/hdfs-site/dfs.internal.nameservices', None)
+ if dfs_ha_nameservices is None:
+   dfs_ha_nameservices = default('/configurations/hdfs-site/dfs.nameservices', None)
+ dfs_ha_namenode_ids = default(format("/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None)
+ 
+ dfs_ha_namemodes_ids_list = []
+ other_namenode_id = None
+ 
+ if dfs_ha_namenode_ids:
+  dfs_ha_namemodes_ids_list = dfs_ha_namenode_ids.split(",")
+  dfs_ha_namenode_ids_array_len = len(dfs_ha_namemodes_ids_list)
+  if dfs_ha_namenode_ids_array_len > 1:
+    dfs_ha_enabled = True
+ 
+ if dfs_ha_enabled:
+  for nn_id in dfs_ha_namemodes_ids_list:
+    nn_host = config['configurations']['hdfs-site'][format('dfs.namenode.rpc-address.{dfs_ha_nameservices}.{nn_id}')]
+    if hostname.lower() in nn_host.lower():
+      namenode_id = nn_id
+      namenode_rpc = nn_host
+    pass
+  pass
+ else:
+   namenode_rpc = default('/configurations/hdfs-site/dfs.namenode.rpc-address', default_fs)
+ 
+ # if HDFS is not installed in the cluster, then don't try to access namenode_rpc
+ if "core-site" in config['configurations'] and namenode_rpc:
+   port_str = namenode_rpc.split(':')[-1].strip()
+   try:
+     nn_rpc_client_port = int(port_str)
+   except ValueError:
+     nn_rpc_client_port = None
+ 
+ if dfs_ha_enabled:
+  dfs_service_rpc_address = default(format('/configurations/hdfs-site/dfs.namenode.servicerpc-address.{dfs_ha_nameservices}.{namenode_id}'), None)
+  dfs_lifeline_rpc_address = default(format('/configurations/hdfs-site/dfs.namenode.lifeline.rpc-address.{dfs_ha_nameservices}.{namenode_id}'), None)
+ else:
+  dfs_service_rpc_address = default('/configurations/hdfs-site/dfs.namenode.servicerpc-address', None)
+  dfs_lifeline_rpc_address = default(format('/configurations/hdfs-site/dfs.namenode.lifeline.rpc-address'), None)
+ 
+ if dfs_service_rpc_address:
+  nn_rpc_dn_port = dfs_service_rpc_address.split(':')[1].strip()
+ 
+ if dfs_lifeline_rpc_address:
+  nn_rpc_healthcheck_port = dfs_lifeline_rpc_address.split(':')[1].strip()
+ 
+ is_nn_client_port_configured = False if nn_rpc_client_port is None else True
+ is_nn_dn_port_configured = False if nn_rpc_dn_port is None else True
+ is_nn_healthcheck_port_configured = False if nn_rpc_healthcheck_port is None else True
+ 
+ ##### end #####
+ 
+ import functools
+ #create partial functions with common arguments for every HdfsResource call
+ #to create/delete/copyfromlocal hdfs directories/files we need to call params.HdfsResource in code
+ HdfsResource = functools.partial(
+   HdfsResource,
+   user=hdfs_user,
+   hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
+   security_enabled = security_enabled,
+   keytab = hdfs_user_keytab,
+   kinit_path_local = kinit_path_local,
+   hadoop_bin_dir = hadoop_bin_dir,
+   hadoop_conf_dir = hadoop_conf_dir,
+   principal_name = hdfs_principal_name,
+   hdfs_site = hdfs_site,
+   default_fs = default_fs,
+   immutable_paths = get_not_managed_resources(),
+   dfs_type = dfs_type
+ )

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/main/resources/stack-hooks/before-START/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/stack-hooks/before-START/scripts/shared_initialization.py
index 0000000,48dc4b0..32870c9
mode 000000,100644..100644
--- a/ambari-server/src/main/resources/stack-hooks/before-START/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stack-hooks/before-START/scripts/shared_initialization.py
@@@ -1,0 -1,256 +1,256 @@@
+ """
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+ 
+     http://www.apache.org/licenses/LICENSE-2.0
+ 
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ 
+ """
+ 
+ import os
+ from resource_management.libraries.providers.hdfs_resource import WebHDFSUtil
+ from resource_management.core.resources.jcepolicyinfo import JcePolicyInfo
+ 
+ from resource_management import *
+ 
+ def setup_hadoop():
+   """
+   Setup hadoop files and directories
+   """
+   import params
+ 
+   Execute(("setenforce","0"),
+           only_if="test -f /selinux/enforce",
+           not_if="(! which getenforce ) || (which getenforce && getenforce | grep -q Disabled)",
+           sudo=True,
+   )
+ 
+   #directories
+   if params.has_namenode or params.dfs_type == 'HCFS':
+     Directory(params.hdfs_log_dir_prefix,
+               create_parents = True,
+               owner='root',
+               group=params.user_group,
+               mode=0775,
+               cd_access='a',
+     )
+     if params.has_namenode:
+       Directory(params.hadoop_pid_dir_prefix,
+               create_parents = True,
+               owner='root',
+               group='root',
+               cd_access='a',
+       )
+     Directory(params.hadoop_tmp_dir,
+               create_parents = True,
+               owner=params.hdfs_user,
+               cd_access='a',
+               )
+   #files
+     if params.security_enabled:
+       tc_owner = "root"
+     else:
+       tc_owner = params.hdfs_user
+       
+     # if WebHDFS is not enabled we need this jar to create hadoop folders and copy tarballs to HDFS.
+     if params.sysprep_skip_copy_fast_jar_hdfs:
+       print "Skipping copying of fast-hdfs-resource.jar as host is sys prepped"
+     elif params.dfs_type == 'HCFS' or not WebHDFSUtil.is_webhdfs_available(params.is_webhdfs_enabled, params.default_fs):
+       # for source-code of jar goto contrib/fast-hdfs-resource
+       File(format("{ambari_libs_dir}/fast-hdfs-resource.jar"),
+            mode=0644,
+            content=StaticFile("fast-hdfs-resource.jar")
+       )
+       
+     if os.path.exists(params.hadoop_conf_dir):
+       File(os.path.join(params.hadoop_conf_dir, 'commons-logging.properties'),
+            owner=tc_owner,
+            content=Template('commons-logging.properties.j2')
+       )
+ 
+       health_check_template_name = "health_check"
+       File(os.path.join(params.hadoop_conf_dir, health_check_template_name),
+            owner=tc_owner,
+            content=Template(health_check_template_name + ".j2")
+       )
+ 
+       log4j_filename = os.path.join(params.hadoop_conf_dir, "log4j.properties")
+       if (params.log4j_props != None):
+         File(log4j_filename,
+              mode=0644,
+              group=params.user_group,
+              owner=params.hdfs_user,
+              content=InlineTemplate(params.log4j_props)
+         )
+       elif (os.path.exists(format("{params.hadoop_conf_dir}/log4j.properties"))):
+         File(log4j_filename,
+              mode=0644,
+              group=params.user_group,
+              owner=params.hdfs_user,
+         )
+ 
+       if params.hadoop_metrics2_properties_content:
+         File(os.path.join(params.hadoop_conf_dir, "hadoop-metrics2.properties"),
+              owner=params.hdfs_user,
+              group=params.user_group,
+              content=InlineTemplate(params.hadoop_metrics2_properties_content)
+              )
+       else:
+         File(os.path.join(params.hadoop_conf_dir, "hadoop-metrics2.properties"),
+              owner=params.hdfs_user,
+              group=params.user_group,
+              content=Template("hadoop-metrics2.properties.j2")
+              )
+ 
+     if params.dfs_type == 'HCFS' and params.has_core_site and 'ECS_CLIENT' in params.component_list:
+        create_dirs()
+ 
+     create_microsoft_r_dir()
+ 
+ 
+ def setup_configs():
+   """
+   Creates configs for services HDFS mapred
+   """
+   import params
+ 
+   if params.has_namenode or params.dfs_type == 'HCFS':
+     if os.path.exists(params.hadoop_conf_dir):
+       File(params.task_log4j_properties_location,
+            content=StaticFile("task-log4j.properties"),
+            mode=0755
+       )
+ 
+     if os.path.exists(os.path.join(params.hadoop_conf_dir, 'configuration.xsl')):
+       File(os.path.join(params.hadoop_conf_dir, 'configuration.xsl'),
+            owner=params.hdfs_user,
+            group=params.user_group
+       )
+     if os.path.exists(os.path.join(params.hadoop_conf_dir, 'masters')):
+       File(os.path.join(params.hadoop_conf_dir, 'masters'),
+                 owner=params.hdfs_user,
+                 group=params.user_group
+       )
+ 
+ def create_javahome_symlink():
+   if os.path.exists("/usr/jdk/jdk1.6.0_31") and not os.path.exists("/usr/jdk64/jdk1.6.0_31"):
+     Directory("/usr/jdk64/",
+          create_parents = True,
+     )
+     Link("/usr/jdk/jdk1.6.0_31",
+          to="/usr/jdk64/jdk1.6.0_31",
+     )
+ 
+ def create_dirs():
+    import params
+    params.HdfsResource(params.hdfs_tmp_dir,
+                        type="directory",
+                        action="create_on_execute",
+                        owner=params.hdfs_user,
+                        mode=0777
+    )
+    params.HdfsResource(params.smoke_hdfs_user_dir,
+                        type="directory",
+                        action="create_on_execute",
+                        owner=params.smoke_user,
+                        mode=params.smoke_hdfs_user_mode
+    )
+    params.HdfsResource(None,
+                       action="execute"
+    )
+ 
+ def create_microsoft_r_dir():
+   import params
+   if 'MICROSOFT_R_NODE_CLIENT' in params.component_list and params.default_fs:
+     directory = '/user/RevoShare'
+     try:
+       params.HdfsResource(directory,
+                           type="directory",
+                           action="create_on_execute",
+                           owner=params.hdfs_user,
+                           mode=0777)
+       params.HdfsResource(None, action="execute")
+     except Exception as exception:
+       Logger.warning("Could not check the existence of {0} on DFS while starting {1}, exception: {2}".format(directory, params.current_service, str(exception)))
+ 
+ def setup_unlimited_key_jce_policy():
+   """
+   Sets up the unlimited key JCE policy if needed. (sets up ambari JCE as well if ambari and the  stack use different JDK)
+   """
+   import params
+   __setup_unlimited_key_jce_policy(custom_java_home=params.java_home, custom_jdk_name=params.jdk_name, custom_jce_name = params.jce_policy_zip)
+   if params.ambari_jce_name and params.ambari_jce_name != params.jce_policy_zip:
+     __setup_unlimited_key_jce_policy(custom_java_home=params.ambari_java_home, custom_jdk_name=params.ambari_jdk_name, custom_jce_name = params.ambari_jce_name)
+ 
+ def __setup_unlimited_key_jce_policy(custom_java_home, custom_jdk_name, custom_jce_name):
+   """
+   Sets up the unlimited key JCE policy if needed.
+ 
+   The following criteria must be met:
+ 
+     * The cluster has not been previously prepared (sys preped) - cluster-env/sysprep_skip_setup_jce = False
 -    * Ambari is managing the host's JVM - /hostLevelParams/jdk_name is set
 -    * Either security is enabled OR a service requires it - /hostLevelParams/unlimited_key_jce_required = True
++    * Ambari is managing the host's JVM - /ambariLevelParams/jdk_name is set
++    * Either security is enabled OR a service requires it - /componentLevelParams/unlimited_key_jce_required = True
+     * The unlimited key JCE policy has not already been installed
+ 
+   If the conditions are met, the following steps are taken to install the unlimited key JCE policy JARs
+ 
+     1. The unlimited key JCE policy ZIP file is downloaded from the Ambari server and stored in the
+         Ambari agent's temporary directory
+     2. The existing JCE policy JAR files are deleted
+     3. The downloaded ZIP file is unzipped into the proper JCE policy directory
+ 
+   :return: None
+   """
+   import params
+ 
+   if params.sysprep_skip_setup_jce:
+     Logger.info("Skipping unlimited key JCE policy check and setup since the host is sys prepped")
+ 
+   elif not custom_jdk_name:
+     Logger.debug("Skipping unlimited key JCE policy check and setup since the Java VM is not managed by Ambari")
+ 
+   elif not params.unlimited_key_jce_required:
+     Logger.debug("Skipping unlimited key JCE policy check and setup since it is not required")
+ 
+   else:
+     jcePolicyInfo = JcePolicyInfo(custom_java_home)
+ 
+     if jcePolicyInfo.is_unlimited_key_jce_policy():
+       Logger.info("The unlimited key JCE policy is required, and appears to have been installed.")
+ 
+     elif custom_jce_name is None:
+       raise Fail("The unlimited key JCE policy needs to be installed; however the JCE policy zip is not specified.")
+ 
+     else:
+       Logger.info("The unlimited key JCE policy is required, and needs to be installed.")
+ 
+       jce_zip_target = format("{artifact_dir}/{custom_jce_name}")
+       jce_zip_source = format("{ambari_server_resources_url}/{custom_jce_name}")
+       java_security_dir = format("{custom_java_home}/jre/lib/security")
+ 
+       Logger.debug("Downloading the unlimited key JCE policy files from {0} to {1}.".format(jce_zip_source, jce_zip_target))
+       Directory(params.artifact_dir, create_parents=True)
+       File(jce_zip_target, content=DownloadSource(jce_zip_source))
+ 
+       Logger.debug("Removing existing JCE policy JAR files: {0}.".format(java_security_dir))
+       File(format("{java_security_dir}/US_export_policy.jar"), action="delete")
+       File(format("{java_security_dir}/local_policy.jar"), action="delete")
+ 
+       Logger.debug("Unzipping the unlimited key JCE policy files from {0} into {1}.".format(jce_zip_target, java_security_dir))
+       extract_cmd = ("unzip", "-o", "-j", "-q", jce_zip_target, "-d", java_security_dir)
+       Execute(extract_cmd,
+               only_if=format("test -e {java_security_dir} && test -f {jce_zip_target}"),
+               path=['/bin/', '/usr/bin'],
+               sudo=True
+               )

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/test/java/org/apache/ambari/server/checks/DruidHighAvailabilityCheckTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/test/java/org/apache/ambari/server/controller/utilities/KerberosIdentityCleanerTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/HostVersionOutOfSyncListenerTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerUsersyncConfigCalculationTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelperTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/VariableReplacementHelperTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-web/app/app.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-web/app/controllers/main/host/details.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --cc ambari-web/app/messages.js
index 684adbd,0123356..4a352ed
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@@ -357,7 -358,7 +358,8 @@@ Em.I18n.translations = 
    'common.repositoryType': 'Repository Type',
    'common.rolling.downgrade': 'Rolling Downgrade',
    'common.express.downgrade': 'Express Downgrade',
 +  'common.minute.ago': 'less than a minute ago',
+   'common.views': 'Views',
  
    'models.alert_instance.tiggered.verbose': "Occurred on {0} <br> Checked on {1}",
    'models.alert_definition.triggered.verbose': "Occurred on {0}",

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-web/app/templates/application.hbs
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/af30ab40/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------


[26/50] [abbrv] ambari git commit: AMBARI-22220 - Should be able to switch the extension version to which a stack version is linked

Posted by ao...@apache.org.
AMBARI-22220 - Should be able to switch the extension version to which a stack version is linked


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5f86f159
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5f86f159
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5f86f159

Branch: refs/heads/branch-3.0-perf
Commit: 5f86f159fae5f65b25ef77b5c9eb9cb7d1179834
Parents: 0317cf7
Author: Tim Thorpe <tt...@apache.org>
Authored: Mon Oct 16 06:16:35 2017 -0700
Committer: Tim Thorpe <tt...@apache.org>
Committed: Mon Oct 16 06:16:35 2017 -0700

----------------------------------------------------------------------
 .../controller/AmbariManagementController.java  |   6 +-
 .../AmbariManagementControllerImpl.java         |  50 ++++----
 .../controller/AmbariManagementHelper.java      |  26 ++++
 .../internal/ExtensionLinkResourceProvider.java |  15 +++
 .../ambari/server/stack/ExtensionHelper.java    |  31 ++++-
 .../server/stack/StackManagerExtensionTest.java |  22 +++-
 .../resources/extensions/EXT/0.2/metainfo.xml   |   4 +-
 .../resources/extensions/EXT/0.4/metainfo.xml   |  32 +++++
 .../EXT/0.4/services/OOZIE2/metainfo.xml        | 118 +++++++++++++++++++
 .../services/OOZIE2/themes/broken_theme.json    |   3 +
 10 files changed, 273 insertions(+), 34 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5f86f159/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementController.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementController.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementController.java
index f0f13e1..45f9b3a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementController.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementController.java
@@ -369,18 +369,18 @@ public interface AmbariManagementController {
   void createExtensionLink(ExtensionLinkRequest request) throws AmbariException;
 
   /**
-   * Update a link between an extension and a stack
+   * Update a link - switch the link's extension version while keeping the same stack version and extension name
    *
    * @throws AmbariException if we fail to link the extension to the stack
    */
   void updateExtensionLink(ExtensionLinkRequest request) throws AmbariException;
 
   /**
-   * Update a link between an extension and a stack
+   * Update a link - switch the link's extension version while keeping the same stack version and extension name
    *
    * @throws AmbariException if we fail to link the extension to the stack
    */
-  void updateExtensionLink(ExtensionLinkEntity linkEntity) throws AmbariException;
+  void updateExtensionLink(ExtensionLinkEntity oldLinkEntity, ExtensionLinkRequest newLinkRequest) throws AmbariException;
 
   /**
    * Delete a link between an extension and a stack

http://git-wip-us.apache.org/repos/asf/ambari/blob/5f86f159/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index b0eb8ac..1b1f524 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -1126,7 +1126,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     try {
       cluster = clusters.getCluster(request.getClusterName());
     } catch (ClusterNotFoundException e) {
-      LOG.info(e.getMessage());
+      LOG.error("Cluster not found ", e);
       throw new ParentObjectNotFoundException("Parent Cluster resource doesn't exist", e);
     }
 
@@ -4795,9 +4795,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
       properties = ambariMetaInfo.getServiceProperties(stackName, stackVersion, serviceName);
     }
     for (PropertyInfo property: properties) {
-      if (property.shouldBeConfigured()) {
-        response.add(property.convertToResponse());
-      }
+      response.add(property.convertToResponse());
     }
 
     return response;
@@ -5622,12 +5620,9 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
   }
 
   /**
-   * This method will update a link between an extension version and a stack version (Extension Link).
-   * Updating will only force ambari server to reread the stack and extension directories.
+   * Update a link - switch the link's extension version while keeping the same stack version and extension name
    *
-   * An extension version is like a stack version but it contains custom services.  Linking an extension
-   * version to the current stack version allows the cluster to install the custom services contained in
-   * the extension version.
+   * @throws AmbariException if we fail to link the extension to the stack
    */
   @Override
   public void updateExtensionLink(ExtensionLinkRequest request) throws AmbariException {
@@ -5641,32 +5636,43 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
       throw new AmbariException("Unable to find extension link"
             + ", linkId=" + request.getLinkId(), e);
     }
-    updateExtensionLink(linkEntity);
+    updateExtensionLink(linkEntity, request);
   }
 
   /**
-   * This method will update a link between an extension version and a stack version (Extension Link).
-   * Updating will only force ambari server to reread the stack and extension directories.
+   * Update a link - switch the link's extension version while keeping the same stack version and extension name
    *
-   * An extension version is like a stack version but it contains custom services.  Linking an extension
-   * version to the current stack version allows the cluster to install the custom services contained in
-   * the extension version.
+   * @throws AmbariException if we fail to link the extension to the stack
    */
   @Override
-  public void updateExtensionLink(ExtensionLinkEntity linkEntity) throws AmbariException {
-    StackInfo stackInfo = ambariMetaInfo.getStack(linkEntity.getStack().getStackName(), linkEntity.getStack().getStackVersion());
+  public void updateExtensionLink(ExtensionLinkEntity oldLinkEntity, ExtensionLinkRequest newLinkRequest) throws AmbariException {
+    StackInfo stackInfo = ambariMetaInfo.getStack(oldLinkEntity.getStack().getStackName(), oldLinkEntity.getStack().getStackVersion());
 
     if (stackInfo == null) {
-      throw new StackAccessException("stackName=" + linkEntity.getStack().getStackName() + ", stackVersion=" + linkEntity.getStack().getStackVersion());
+      throw new StackAccessException(String.format("stackName=%s, stackVersion=%s", oldLinkEntity.getStack().getStackName(), oldLinkEntity.getStack().getStackVersion()));
     }
 
-    ExtensionInfo extensionInfo = ambariMetaInfo.getExtension(linkEntity.getExtension().getExtensionName(), linkEntity.getExtension().getExtensionVersion());
+    if (newLinkRequest.getExtensionName() == null || newLinkRequest.getExtensionVersion() == null) {
+      throw new AmbariException(String.format("Invalid extension name or version: %s/%s",
+		  newLinkRequest.getExtensionName(), newLinkRequest.getExtensionVersion()));
+    }
 
-    if (extensionInfo == null) {
-      throw new StackAccessException("extensionName=" + linkEntity.getExtension().getExtensionName() + ", extensionVersion=" + linkEntity.getExtension().getExtensionVersion());
+    if (!newLinkRequest.getExtensionName().equals(oldLinkEntity.getExtension().getExtensionName())) {
+      throw new AmbariException(String.format("Update is not allowed to switch the extension name, only the version.  Old name/new name: %s/%s",
+		  oldLinkEntity.getExtension().getExtensionName(), newLinkRequest.getExtensionName()));
+    }
+
+    ExtensionInfo oldExtensionInfo = ambariMetaInfo.getExtension(oldLinkEntity.getExtension().getExtensionName(), oldLinkEntity.getExtension().getExtensionVersion());
+    ExtensionInfo newExtensionInfo = ambariMetaInfo.getExtension(newLinkRequest.getExtensionName(), newLinkRequest.getExtensionVersion());
+
+    if (oldExtensionInfo == null) {
+      throw new StackAccessException(String.format("Old extensionName=%s, extensionVersion=%s", oldLinkEntity.getExtension().getExtensionName(), oldLinkEntity.getExtension().getExtensionVersion()));
+    }
+    if (newExtensionInfo == null) {
+      throw new StackAccessException(String.format("New extensionName=%s, extensionVersion=%s", newLinkRequest.getExtensionName(), newLinkRequest.getExtensionVersion()));
     }
 
-    ambariMetaInfo.getStackManager().linkStackToExtension(stackInfo, extensionInfo);
+    helper.updateExtensionLink(ambariMetaInfo.getStackManager(), oldLinkEntity, stackInfo, oldExtensionInfo, newExtensionInfo);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/5f86f159/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementHelper.java
index 0c8edfe..e98c2e9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementHelper.java
@@ -162,6 +162,32 @@ public class AmbariManagementHelper {
     }
   }
 
+  /**
+   * Updates the extension version of the currently linked extension to the stack version
+   */
+  public void updateExtensionLink(StackManager stackManager, ExtensionLinkEntity linkEntity, StackInfo stackInfo,
+                                  ExtensionInfo oldExtensionInfo, ExtensionInfo newExtensionInfo) throws AmbariException {
+    //validateUpdateExtensionLinkRequest(stackInfo, extensionInfo);
+    ExtensionHelper.validateUpdateLink(stackManager, stackInfo, oldExtensionInfo, newExtensionInfo);
+
+    ExtensionEntity extension = extensionDAO.find(newExtensionInfo.getName(), newExtensionInfo.getVersion());
+    linkEntity.setExtension(extension);
+
+    try {
+      linkEntity = linkDAO.merge(linkEntity);
+    } catch (RollbackException e) {
+      String message = "Unable to update extension link";
+      LOG.debug(message, e);
+      String errorMessage = message
+              + ", stackName=" + stackInfo.getName()
+              + ", stackVersion=" + stackInfo.getVersion()
+              + ", extensionName=" + newExtensionInfo.getName()
+              + ", extensionVersion=" + newExtensionInfo.getVersion();
+      LOG.warn(errorMessage);
+      throw new AmbariException(errorMessage, e);
+    }
+  }
+
   private ExtensionLinkEntity createExtensionLinkEntity(StackInfo stackInfo, ExtensionInfo extensionInfo) throws AmbariException {
     StackEntity stack = stackDAO.find(stackInfo.getName(), stackInfo.getVersion());
     ExtensionEntity extension = extensionDAO.find(extensionInfo.getName(), extensionInfo.getVersion());

http://git-wip-us.apache.org/repos/asf/ambari/blob/5f86f159/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ExtensionLinkResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ExtensionLinkResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ExtensionLinkResourceProvider.java
index 9b894ff..db904bf 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ExtensionLinkResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ExtensionLinkResourceProvider.java
@@ -196,6 +196,21 @@ public class ExtensionLinkResourceProvider extends AbstractControllerResourcePro
         throws SystemException, UnsupportedPropertyException,
         NoSuchResourceException, NoSuchParentResourceException {
 
+    final Set<ExtensionLinkRequest> requests = new HashSet<>();
+    for (Map<String, Object> propertyMap : request.getProperties()) {
+      requests.add(getRequest(propertyMap));
+    }
+
+    RequestStatusResponse response = modifyResources(new Command<RequestStatusResponse>() {
+      @Override
+      public RequestStatusResponse invoke() throws AmbariException {
+        for (ExtensionLinkRequest extensionLinkRequest : requests) {
+          getManagementController().updateExtensionLink(extensionLinkRequest);
+        }
+        return null;
+      }
+    });
+
     //Need to reread the stacks/extensions directories so the latest information is available
     try {
       getManagementController().updateStacks();

http://git-wip-us.apache.org/repos/asf/ambari/blob/5f86f159/ambari-server/src/main/java/org/apache/ambari/server/stack/ExtensionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/ExtensionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/ExtensionHelper.java
index 91dc870..3a143f4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/ExtensionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/ExtensionHelper.java
@@ -18,6 +18,9 @@
 
 package org.apache.ambari.server.stack;
 
+import java.util.ArrayList;
+import java.util.Collection;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ServiceNotFoundException;
 import org.apache.ambari.server.state.Cluster;
@@ -72,6 +75,12 @@ public class ExtensionHelper {
     validateRequiredExtensions(stack, extension);
   }
 
+  public static void validateUpdateLink(StackManager stackManager, StackInfo stack, ExtensionInfo oldExtension, ExtensionInfo newExtension) throws AmbariException {
+    validateSupportedStackVersion(stack, newExtension);
+    validateServiceDuplication(stackManager, stack, oldExtension, newExtension);
+    validateRequiredExtensions(stack, newExtension);
+  }
+
   private static void validateSupportedStackVersion(StackInfo stack, ExtensionInfo extension) throws AmbariException {
     for (ExtensionMetainfoXml.Stack validStack : extension.getStacks()) {
       if (validStack.getName().equals(stack.getName())) {
@@ -93,8 +102,28 @@ public class ExtensionHelper {
   }
 
   private static void validateServiceDuplication(StackManager stackManager, StackInfo stack, ExtensionInfo extension) throws AmbariException {
+    validateServiceDuplication(stackManager, stack, extension, extension.getServices());
+  }
+
+  private static void validateServiceDuplication(StackManager stackManager, StackInfo stack, ExtensionInfo oldExtension, ExtensionInfo newExtension) throws AmbariException {
+    ArrayList<ServiceInfo> services = new ArrayList<>(newExtension.getServices().size());
+    for (ServiceInfo service : newExtension.getServices()) {
+      boolean found = false;
+      for (ServiceInfo current : oldExtension.getServices()) {
+        if (service.getName().equals(current.getName())) {
+          found = true;
+        }
+      }
+      if (!found) {
+        services.add(service);
+      }
+    }
+    validateServiceDuplication(stackManager, stack, newExtension, services);
+  }
+
+  private static void validateServiceDuplication(StackManager stackManager, StackInfo stack, ExtensionInfo extension, Collection<ServiceInfo> services) throws AmbariException {
     LOG.debug("Looking for duplicate services");
-    for (ServiceInfo service : extension.getServices()) {
+    for (ServiceInfo service : services) {
       LOG.debug("Looking for duplicate service " + service.getName());
       if (service != null) {
         ServiceInfo stackService = null;

http://git-wip-us.apache.org/repos/asf/ambari/blob/5f86f159/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerExtensionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerExtensionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerExtensionTest.java
index cef30b5..6617b33 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerExtensionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerExtensionTest.java
@@ -81,6 +81,9 @@ public class StackManagerExtensionTest  {
     ExtensionEntity extension3 = new ExtensionEntity();
     extension3.setExtensionName("EXT");
     extension3.setExtensionVersion("0.3");
+    ExtensionEntity extension4 = new ExtensionEntity();
+    extension4.setExtensionName("EXT");
+    extension4.setExtensionVersion("0.4");
     ExtensionLinkEntity link1 = new ExtensionLinkEntity();
     link1.setLinkId(new Long(-1));
     link1.setStack(stack1);
@@ -96,6 +99,7 @@ public class StackManagerExtensionTest  {
     expect(extensionDao.find("EXT", "0.1")).andReturn(extension1).atLeastOnce();
     expect(extensionDao.find("EXT", "0.2")).andReturn(extension2).atLeastOnce();
     expect(extensionDao.find("EXT", "0.3")).andReturn(extension3).atLeastOnce();
+    expect(extensionDao.find("EXT", "0.4")).andReturn(extension4).atLeastOnce();
 
     expect(linkDao.findByStack("HDP", "0.1")).andReturn(linkList).atLeastOnce();
     expect(linkDao.findByStack(EasyMock.anyObject(String.class),
@@ -104,6 +108,8 @@ public class StackManagerExtensionTest  {
     expect(linkDao.findByStackAndExtension("HDP", "0.2", "EXT", "0.2")).andReturn(null).atLeastOnce();
     expect(linkDao.findByStackAndExtension("HDP", "0.1", "EXT", "0.1")).andReturn(link1).atLeastOnce();
 
+    expect(linkDao.merge(link1)).andReturn(link1).atLeastOnce();
+
     replay(actionMetadata, stackDao, metaInfoDao, osFamily, extensionDao, linkDao); //linkEntity
 
     String stacks = ClassLoader.getSystemClassLoader().getResource("stacks_with_extensions").getPath();
@@ -144,7 +150,7 @@ public class StackManagerExtensionTest  {
     assertNotNull("EXT 0.2's parent: " + extension.getParentExtensionVersion(), extension.getParentExtensionVersion());
     assertEquals("EXT 0.2's parent: " + extension.getParentExtensionVersion(), "0.1", extension.getParentExtensionVersion());
     assertNotNull(extension.getService("OOZIE2"));
-    assertTrue("Extension is not set to auto link", extension.isAutoLink());
+    assertTrue("Extension is set to auto link", !extension.isAutoLink());
     oozie = extension.getService("OOZIE2");
     assertNotNull("Package dir is " + oozie.getServicePackageFolder(), oozie.getServicePackageFolder());
     assertTrue("Package dir is " + oozie.getServicePackageFolder(), oozie.getServicePackageFolder().contains("extensions/EXT/0.1/services/OOZIE2/package"));
@@ -156,18 +162,24 @@ public class StackManagerExtensionTest  {
     assertNotNull(themes);
     assertTrue("Number of themes is " + themes.size(), themes.size() == 0);
 
+    extension = stackManager.getExtension("EXT", "0.3");
+    assertTrue("Extension is not set to auto link", extension.isAutoLink());
+
     StackInfo stack = stackManager.getStack("HDP", "0.1");
     assertNotNull(stack.getService("OOZIE2"));
     oozie = stack.getService("OOZIE2");
     assertNotNull("Package dir is " + oozie.getServicePackageFolder(), oozie.getServicePackageFolder());
     assertTrue("Package dir is " + oozie.getServicePackageFolder(), oozie.getServicePackageFolder().contains("extensions/EXT/0.1/services/OOZIE2/package"));
     assertEquals(oozie.getVersion(), "3.2.0");
-
     assertTrue("Extensions found: " + stack.getExtensions().size(), stack.getExtensions().size() == 1);
     extension = stack.getExtensions().iterator().next();
     assertEquals("Extension name: " + extension.getName(), extension.getName(), "EXT");
     assertEquals("Extension version: " + extension.getVersion(), extension.getVersion(), "0.1");
 
+    ExtensionInfo extensionInfo2 = stackManager.getExtension("EXT", "0.2");
+    helper.updateExtensionLink(stackManager, link1, stack, extension, extensionInfo2);
+    assertEquals(link1.getExtension().getExtensionVersion(), link1.getExtension().getExtensionVersion(), "0.2");
+
     stack = stackManager.getStack("HDP", "0.2");
     assertTrue("Extensions found: " + stack.getExtensions().size(), stack.getExtensions().size() == 0);
 
@@ -177,15 +189,13 @@ public class StackManagerExtensionTest  {
     assertNotNull(extension.getService("OOZIE2"));
     oozie = extension.getService("OOZIE2");
     assertEquals(oozie.getVersion(), "4.0.0");
-
     assertEquals("Extension name: " + extension.getName(), extension.getName(), "EXT");
-    assertEquals("Extension version: " + extension.getVersion(), extension.getVersion(), "0.3");
+    assertEquals("Extension version: " + extension.getVersion(), extension.getVersion(), "0.4");
 
     stack = stackManager.getStack("HDP", "0.4");
     assertTrue("Extensions found: " + stack.getExtensions().size(), stack.getExtensions().size() == 1);
     extension = stack.getExtensions().iterator().next();
     assertEquals("Extension name: " + extension.getName(), extension.getName(), "EXT");
-    assertEquals("Extension version: " + extension.getVersion(), extension.getVersion(), "0.3");
+    assertEquals("Extension version: " + extension.getVersion(), extension.getVersion(), "0.4");
   }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5f86f159/ambari-server/src/test/resources/extensions/EXT/0.2/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/extensions/EXT/0.2/metainfo.xml b/ambari-server/src/test/resources/extensions/EXT/0.2/metainfo.xml
index c95a20f..fa84c53 100644
--- a/ambari-server/src/test/resources/extensions/EXT/0.2/metainfo.xml
+++ b/ambari-server/src/test/resources/extensions/EXT/0.2/metainfo.xml
@@ -20,12 +20,12 @@
     <active>true</active>
   </versions>
   <extends>0.1</extends>
-  <auto-link>true</auto-link>
+  <auto-link>false</auto-link>
   <prerequisites>
     <min-stack-versions>
       <stack>
         <name>HDP</name>
-        <version>0.3</version>
+        <version>0.1</version>
       </stack>
     </min-stack-versions>
   </prerequisites>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5f86f159/ambari-server/src/test/resources/extensions/EXT/0.4/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/extensions/EXT/0.4/metainfo.xml b/ambari-server/src/test/resources/extensions/EXT/0.4/metainfo.xml
new file mode 100644
index 0000000..0e74813
--- /dev/null
+++ b/ambari-server/src/test/resources/extensions/EXT/0.4/metainfo.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <versions>
+    <active>true</active>
+  </versions>
+  <extends>0.3</extends>
+  <auto-link>true</auto-link>
+  <prerequisites>
+    <min-stack-versions>
+      <stack>
+        <name>HDP</name>
+        <version>0.3</version>
+      </stack>
+    </min-stack-versions>
+  </prerequisites>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5f86f159/ambari-server/src/test/resources/extensions/EXT/0.4/services/OOZIE2/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/extensions/EXT/0.4/services/OOZIE2/metainfo.xml b/ambari-server/src/test/resources/extensions/EXT/0.4/services/OOZIE2/metainfo.xml
new file mode 100644
index 0000000..9176551
--- /dev/null
+++ b/ambari-server/src/test/resources/extensions/EXT/0.4/services/OOZIE2/metainfo.xml
@@ -0,0 +1,118 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>OOZIE2</name>
+      <comment>System for workflow coordination and execution of Apache Hadoop jobs</comment>
+      <version>4.0.0</version>
+
+      <components>
+        <component>
+          <name>OOZIE2_SERVER</name>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
+          <dependencies>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>MAPREDUCE/MAPREDUCE_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/oozie2_server.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+        </component>
+
+        <component>
+          <name>OOZIE2_CLIENT</name>
+          <category>CLIENT</category>
+          <cardinality>0+</cardinality>
+          <dependencies>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>MAPREDUCE/MAPREDUCE_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/oozie2_client.py</script>
+            <scriptType>PYTHON</scriptType>
+          </commandScript>
+        </component>
+      </components>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>oozie2.noarch</name>
+            </package>
+            <package>
+              <name>oozie2-client.noarch</name>
+            </package>
+            <package>
+              <name>extjs-2.2-1</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+      <commandScript>
+        <script>scripts/service_check.py</script>
+        <scriptType>PYTHON</scriptType>
+        <timeout>300</timeout>
+      </commandScript>
+
+      <configuration-dependencies>
+        <config-type>global</config-type>
+        <config-type>oozie2-site</config-type>
+      </configuration-dependencies>
+
+      <themes>
+        <theme>
+          <fileName>broken_theme.json</fileName>
+          <default>true</default>
+        </theme>
+      </themes>
+
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5f86f159/ambari-server/src/test/resources/extensions/EXT/0.4/services/OOZIE2/themes/broken_theme.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/extensions/EXT/0.4/services/OOZIE2/themes/broken_theme.json b/ambari-server/src/test/resources/extensions/EXT/0.4/services/OOZIE2/themes/broken_theme.json
new file mode 100644
index 0000000..6e8b5bf
--- /dev/null
+++ b/ambari-server/src/test/resources/extensions/EXT/0.4/services/OOZIE2/themes/broken_theme.json
@@ -0,0 +1,3 @@
+{
+  "configuration": {
+}


[10/50] [abbrv] ambari git commit: AMBARI-22229.Handle upload of interpreter.json to remote storage in Ambari(Prabhjyot Singh via Venkata Sairam)

Posted by ao...@apache.org.
AMBARI-22229.Handle upload of interpreter.json to remote storage in Ambari(Prabhjyot Singh via Venkata Sairam)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a8ba5e61
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a8ba5e61
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a8ba5e61

Branch: refs/heads/branch-3.0-perf
Commit: a8ba5e61c50ac92b787d3b2de4cec8d29da92d74
Parents: 0f76c7f
Author: Venkata Sairam <ve...@gmail.com>
Authored: Fri Oct 13 15:22:33 2017 +0530
Committer: Venkata Sairam <ve...@gmail.com>
Committed: Fri Oct 13 15:22:33 2017 +0530

----------------------------------------------------------------------
 .../common-services/ZEPPELIN/0.7.0/package/scripts/master.py | 3 ++-
 .../src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py | 8 ++++----
 2 files changed, 6 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a8ba5e61/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
index a8b1b32..6a84d79 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
@@ -305,7 +305,8 @@ class Master(Script):
   def get_zeppelin_conf_FS_directory(self, params):
     hdfs_interpreter_config = params.config['configurations']['zeppelin-config']['zeppelin.config.fs.dir']
 
-    if not hdfs_interpreter_config.startswith("/"):
+    # if it doesn't start from "/" or doesn't contains "://" as in hdfs://, file://, etc then make it a absolute path
+    if not (hdfs_interpreter_config.startswith("/") or '://' in hdfs_interpreter_config):
       hdfs_interpreter_config = "/user/" + format("{zeppelin_user}") + "/" + hdfs_interpreter_config
 
     return hdfs_interpreter_config

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8ba5e61/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
index e5d0240..400350c 100644
--- a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
+++ b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
@@ -342,7 +342,7 @@ class TestZeppelin070(RMFTestCase):
                               )
 
     self.assertResourceCalled('HdfsResource',
-                              '/user/zeppelin/hdfs:///user/zeppelin/conf',
+                              'hdfs:///user/zeppelin/conf',
                               security_enabled=False,
                               hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               keytab=UnknownConfigurationMock(),
@@ -368,7 +368,7 @@ class TestZeppelin070(RMFTestCase):
 
 
     self.assertResourceCalled('HdfsResource',
-                              '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
+                              'hdfs:///user/zeppelin/conf/interpreter.json',
                               security_enabled=False,
                               hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               keytab=UnknownConfigurationMock(),
@@ -395,7 +395,7 @@ class TestZeppelin070(RMFTestCase):
                               )
 
     self.assertResourceCalled('HdfsResource',
-                              '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
+                              'hdfs:///user/zeppelin/conf/interpreter.json',
                               security_enabled=False,
                               hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               keytab=UnknownConfigurationMock(),
@@ -421,7 +421,7 @@ class TestZeppelin070(RMFTestCase):
                               group='zeppelin',
                               )
 
-    self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
+    self.assertResourceCalled('HdfsResource', 'hdfs:///user/zeppelin/conf/interpreter.json',
         security_enabled = False,
         hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
         keytab = UnknownConfigurationMock(),


[38/50] [abbrv] ambari git commit: Updated team page. (yusaku)

Posted by ao...@apache.org.
Updated team page. (yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/881e15d8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/881e15d8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/881e15d8

Branch: refs/heads/branch-3.0-perf
Commit: 881e15d8d5c0ad3718e50e4427b8948c723ac255
Parents: b9f2670
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Tue Oct 17 14:35:15 2017 -0700
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Tue Oct 17 14:35:15 2017 -0700

----------------------------------------------------------------------
 docs/pom.xml | 49 ++++++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 48 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/881e15d8/docs/pom.xml
----------------------------------------------------------------------
diff --git a/docs/pom.xml b/docs/pom.xml
index e041002..a75e093 100644
--- a/docs/pom.xml
+++ b/docs/pom.xml
@@ -196,7 +196,19 @@
             <organization>
                 Hortonworks
             </organization>
-        </developer>        
+        </developer>
+        <developer>
+            <id>anitajebaraj</id>
+            <name>Anita Jebaraj</name>
+            <email>anitajebaraj@apache.org</email>
+            <timezone></timezone>
+            <roles>
+                <role>Committer</role>
+            </roles>
+            <organization>
+                IBM
+            </organization>
+        </developer>
         <developer>
             <id>atkach</id>
             <name>Andrii Tkach</name>
@@ -362,6 +374,17 @@
             </organization>            
         </developer>
         <developer>
+            <id>dgergely</id>
+            <name>Daniel Gergely</name>
+            <email>dgergely</email>
+            <timezone>+1</timezone>
+            <roles>
+                <role>Committer</role>
+            </roles>
+            <organization>
+            </organization>
+        </developer>
+        <developer>
             <id>dillidorai</id>
             <name>Dilli Dorai</name>
             <email>dillidorai@apache.org</email>
@@ -852,6 +875,18 @@
             </organization>
         </developer>
         <developer>
+            <id>qin</id>
+            <name>Qin Liu</name>
+            <email>qin@apache.org</email>
+            <timezone></timezone>
+            <roles>
+                <role>Committer</role>
+            </roles>
+            <organization>
+                IBM
+            </organization>
+        </developer>
+        <developer>
             <id>ramya</id>
             <name>Ramya Sunil</name>
             <email>ramya@apache.org</email>
@@ -1137,6 +1172,18 @@
             </organization>
         </developer>       
         <developer>
+            <id>vishalsuvagia</id>
+            <name>Vishal Suvagia</name>
+            <email>vishalsuvagia@apache.org</email>
+            <timezone></timezone>
+            <roles>
+                <role>Committer</role>
+            </roles>
+            <organization>
+                Hortonworks
+            </organization>
+        </developer>
+        <developer>
             <id>vsairam</id>
             <name>Venkata Sairam Lanka</name>
             <email>vsairam@apache.org</email>


[22/50] [abbrv] ambari git commit: AMBARI-22216. Ambari Schema Upgrade Failed during Ambari Upgrade - workaround (dlysnichenko)

Posted by ao...@apache.org.
AMBARI-22216. Ambari Schema Upgrade Failed during Ambari Upgrade - workaround (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e219186e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e219186e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e219186e

Branch: refs/heads/branch-3.0-perf
Commit: e219186ea68a869ee0b7929dfd6c5322cf8c1e7b
Parents: df10813
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Mon Oct 16 12:13:01 2017 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Mon Oct 16 12:13:47 2017 +0300

----------------------------------------------------------------------
 .../ambari/server/upgrade/UpgradeCatalog260.java | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e219186e/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
index 866a501..71a0ff3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
@@ -81,6 +81,7 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
   public static final String DESIRED_REPO_VERSION_ID_COLUMN = "desired_repo_version_id";
   public static final String REPO_STATE_COLUMN = "repo_state";
   public static final String FK_SCDS_DESIRED_STACK_ID = "FK_scds_desired_stack_id";
+  public static final String FK_SERVICECOMPONENTDESIREDSTATE_DESIRED_STACK_ID = "FK_servicecomponentdesiredstate_desired_stack_id";
   public static final String FK_SCDS_DESIRED_REPO_ID = "FK_scds_desired_repo_id";
 
   public static final String REPO_VERSION_TABLE = "repo_version";
@@ -152,6 +153,9 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
   public static final String STALE_POSTGRESS_USERS_LDAP_USER_KEY = "users_ldap_user_key";
   public static final String SHORT_URL_COLUMN = "short_url";
   public static final String FK_INSTANCE_URL_ID = "FK_instance_url_id";
+  public static final String FK_SERVICEDESIREDSTATE_DESIRED_STACK_ID = "FK_servicedesiredstate_desired_stack_id";
+  public static final String FK_HOSTCOMPONENTDESIREDSTATE_DESIRED_STACK_ID = "FK_hostcomponentdesiredstate_desired_stack_id";
+  public static final String FK_HOSTCOMPONENTSTATE_CURRENT_STACK_ID = "FK_hostcomponentstate_current_stack_id";
 
 
   /**
@@ -187,6 +191,7 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
   @Override
   protected void executeDDLUpdates() throws AmbariException, SQLException {
     int currentVersionID = getCurrentVersionID();
+    dropBrokenFK();
     updateServiceComponentDesiredStateTable(currentVersionID);
     updateServiceDesiredStateTable(currentVersionID);
     addSelectedCollumsToClusterconfigTable();
@@ -202,6 +207,20 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
     removeStaleConstraints();
   }
 
+  /**
+   * Drop broken FK
+   * {@value #FK_SERVICECOMPONENTDESIREDSTATE_DESIRED_STACK_ID}
+   * {@value #FK_SERVICEDESIREDSTATE_DESIRED_STACK_ID}
+   * {@value #FK_HOSTCOMPONENTDESIREDSTATE_DESIRED_STACK_ID}
+   * {@value #FK_HOSTCOMPONENTSTATE_CURRENT_STACK_ID}
+   */
+  private void dropBrokenFK() throws SQLException {
+    dbAccessor.dropFKConstraint(SERVICE_COMPONENT_DESIRED_STATE_TABLE, FK_SERVICECOMPONENTDESIREDSTATE_DESIRED_STACK_ID);
+    dbAccessor.dropFKConstraint(SERVICE_DESIRED_STATE_TABLE, FK_SERVICEDESIREDSTATE_DESIRED_STACK_ID);
+    dbAccessor.dropFKConstraint(HOST_COMPONENT_DESIRED_STATE_TABLE, FK_HOSTCOMPONENTDESIREDSTATE_DESIRED_STACK_ID);
+    dbAccessor.dropFKConstraint(HOST_COMPONENT_STATE_TABLE, FK_HOSTCOMPONENTSTATE_CURRENT_STACK_ID);
+  }
+
 
   /**
    * Updates {@value #VIEWURL_TABLE} table.


[16/50] [abbrv] ambari git commit: AMBARI-22242. Express Upgrade from IOP 4.2 to HDP 2.6.2 AMS failed to restart before finalizing. (swagle)

Posted by ao...@apache.org.
AMBARI-22242. Express Upgrade from IOP 4.2 to HDP 2.6.2 AMS failed to restart before finalizing. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/62a0a18c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/62a0a18c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/62a0a18c

Branch: refs/heads/branch-3.0-perf
Commit: 62a0a18ce57c7290989ab3ff5ffb08e88225570e
Parents: 6e1cac8
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Fri Oct 13 20:55:36 2017 -0700
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Fri Oct 13 20:55:36 2017 -0700

----------------------------------------------------------------------
 .../AMBARI_METRICS/0.1.0/package/scripts/metrics_collector.py    | 4 ++--
 .../AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana.py      | 4 ++--
 .../AMBARI_METRICS/0.1.0/package/scripts/metrics_monitor.py      | 4 ++--
 3 files changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/62a0a18c/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_collector.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_collector.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_collector.py
index fc2576d..1c6f2a1 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_collector.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_collector.py
@@ -44,13 +44,13 @@ class AmsCollector(Script):
     hbase('regionserver', action)
     ams(name='collector')
 
-  def start(self, env):
+  def start(self, env, upgrade_type=None):
     self.configure(env, action = 'start') # for security
     # stop hanging components before start
     ams_service('collector', action = 'stop')
     ams_service('collector', action = 'start')
 
-  def stop(self, env):
+  def stop(self, env, upgrade_type=None):
     import params
     env.set_params(params)
     # Sometimes, stop() may be called before start(), in case restart() is initiated right after installation

http://git-wip-us.apache.org/repos/asf/ambari/blob/62a0a18c/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana.py
index 1f0e049..387b018 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana.py
@@ -38,7 +38,7 @@ class AmsGrafana(Script):
     env.set_params(params)
     ams(name='grafana', action=action)
 
-  def start(self, env):
+  def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)
     self.configure(env, action = 'start')
@@ -61,7 +61,7 @@ class AmsGrafana(Script):
     # Create pre-built dashboards
     create_ams_dashboards()
 
-  def stop(self, env):
+  def stop(self, env, upgrade_type=None):
     import params
     env.set_params(params)
     self.configure(env, action = 'stop')

http://git-wip-us.apache.org/repos/asf/ambari/blob/62a0a18c/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_monitor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_monitor.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_monitor.py
index a377f6d..16c7997 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_monitor.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_monitor.py
@@ -35,14 +35,14 @@ class AmsMonitor(Script):
     env.set_params(params)
     ams(name='monitor')
 
-  def start(self, env):
+  def start(self, env, upgrade_type=None):
     self.configure(env) # for security
 
     ams_service( 'monitor',
                  action = 'start'
     )
 
-  def stop(self, env):
+  def stop(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 


[35/50] [abbrv] ambari git commit: AMBARI-22257. Metrics collector fails to stop after Datanode is stopped in distributed mode. (swagle)

Posted by ao...@apache.org.
AMBARI-22257. Metrics collector fails to stop after Datanode is stopped in distributed mode. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/53f028e9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/53f028e9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/53f028e9

Branch: refs/heads/branch-3.0-perf
Commit: 53f028e92a6a783a161b4540e48484c7ecfaaf44
Parents: 448d6a8
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Tue Oct 17 10:47:43 2017 -0700
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Tue Oct 17 10:47:43 2017 -0700

----------------------------------------------------------------------
 .../src/main/resources/stacks/HDP/2.0.6/role_command_order.json    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/53f028e9/ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json
index 31f26e3..78a31f1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json
@@ -56,7 +56,7 @@
     "NAMENODE-STOP": ["RESOURCEMANAGER-STOP", "NODEMANAGER-STOP",
         "HISTORYSERVER-STOP", "HBASE_MASTER-STOP", "METRICS_COLLECTOR-STOP"],
     "DATANODE-STOP": ["RESOURCEMANAGER-STOP", "NODEMANAGER-STOP",
-        "HISTORYSERVER-STOP", "HBASE_MASTER-STOP"],
+        "HISTORYSERVER-STOP", "HBASE_MASTER-STOP", "METRICS_COLLECTOR-STOP"],
     "METRICS_GRAFANA-START": ["METRICS_COLLECTOR-START"],
     "METRICS_COLLECTOR-STOP": ["METRICS_GRAFANA-STOP"]
   },


[06/50] [abbrv] ambari git commit: AMBARI-22207. [Intermittent] While moving master, a manual commands wizard came in between (alexantonenko)

Posted by ao...@apache.org.
AMBARI-22207. [Intermittent] While moving master, a manual commands wizard came in between (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/68df85d4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/68df85d4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/68df85d4

Branch: refs/heads/branch-3.0-perf
Commit: 68df85d402b047c54f9d0324e7f94b970c5282ee
Parents: be605cb
Author: Alex Antonenko <aa...@hortonworks.com>
Authored: Thu Oct 12 15:53:00 2017 +0300
Committer: Alex Antonenko <aa...@hortonworks.com>
Committed: Thu Oct 12 15:53:00 2017 +0300

----------------------------------------------------------------------
 .../main/service/reassign/step3_controller.js     | 18 ++++++++++++++----
 .../app/mixins/wizard/assign_master_components.js |  2 +-
 .../app/templates/main/service/reassign/step3.hbs |  6 +++---
 .../wizard/assign_master_components_test.js       |  2 +-
 4 files changed, 19 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/68df85d4/ambari-web/app/controllers/main/service/reassign/step3_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/reassign/step3_controller.js b/ambari-web/app/controllers/main/service/reassign/step3_controller.js
index c44c4f3..4898b75 100644
--- a/ambari-web/app/controllers/main/service/reassign/step3_controller.js
+++ b/ambari-web/app/controllers/main/service/reassign/step3_controller.js
@@ -268,9 +268,16 @@ App.ReassignMasterWizardStep3Controller = Em.Controller.extend({
 
   propertiesToChange: {},
 
-  isSubmitDisabled: Em.computed.and('wizardController.isComponentWithReconfiguration', '!isLoaded'),
+  isSubmitDisabled: Em.computed.or('!isLoaded', 'submitButtonClicked'),
+
+  /**
+   * Is Submit-click processing now
+   * @type {bool}
+   */
+  submitButtonClicked: false,
 
   loadStep: function () {
+    this.set('submitButtonClicked', false);
     if (this.get('wizardController.isComponentWithReconfiguration')) {
       this.set('isLoaded', false);
       App.ajax.send({
@@ -695,8 +702,11 @@ App.ReassignMasterWizardStep3Controller = Em.Controller.extend({
   },
 
   submit: function() {
-    App.get('router.mainAdminKerberosController').getKDCSessionState(function() {
-      App.router.send("next");
-    });
+    if (!this.get('submitButtonClicked')) {
+      this.set('submitButtonClicked', true);
+      App.get('router.mainAdminKerberosController').getKDCSessionState(function() {
+        App.router.send("next");
+      });
+    }
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/68df85d4/ambari-web/app/mixins/wizard/assign_master_components.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/wizard/assign_master_components.js b/ambari-web/app/mixins/wizard/assign_master_components.js
index 84a56f1..c9577f8 100644
--- a/ambari-web/app/mixins/wizard/assign_master_components.js
+++ b/ambari-web/app/mixins/wizard/assign_master_components.js
@@ -1195,7 +1195,7 @@ App.AssignMasterComponents = Em.Mixin.create(App.HostComponentValidationMixin, A
     }
   },
 
-  nextButtonDisabled: Em.computed.or('App.router.btnClickInProgress', 'submitDisabled', 'validationInProgress'),
+  nextButtonDisabled: Em.computed.or('App.router.btnClickInProgress', 'submitDisabled', 'validationInProgress', '!isLoaded'),
 
   /**
    * Submit button click handler

http://git-wip-us.apache.org/repos/asf/ambari/blob/68df85d4/ambari-web/app/templates/main/service/reassign/step3.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/service/reassign/step3.hbs b/ambari-web/app/templates/main/service/reassign/step3.hbs
index 1f6a393..a6e42af 100644
--- a/ambari-web/app/templates/main/service/reassign/step3.hbs
+++ b/ambari-web/app/templates/main/service/reassign/step3.hbs
@@ -64,8 +64,8 @@
 </div>
 <div class="wizard-footer col-md-12">
   <div class="btn-area">
-    <a class="btn btn-default pull-left" {{action back href="true"}}>&larr; {{t common.back}}</a>
-    <a class="btn btn-success pull-right"
-       id="spinner" {{bindAttr disabled="controller.isSubmitDisabled"}} {{action submit target="controller"}}>{{t common.deploy}} &rarr;</a>
+      <button class="btn pull-left" {{action back href="true"}}>&larr; {{t common.back}}</button>
+      <button class="btn btn-success pull-right"
+              id="spinner" {{bindAttr disabled="controller.isSubmitDisabled"}} {{action submit target="controller"}}>{{t common.deploy}} &rarr;</button>
   </div>
 </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/68df85d4/ambari-web/test/mixins/wizard/assign_master_components_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mixins/wizard/assign_master_components_test.js b/ambari-web/test/mixins/wizard/assign_master_components_test.js
index 635143c..20efb8e 100644
--- a/ambari-web/test/mixins/wizard/assign_master_components_test.js
+++ b/ambari-web/test/mixins/wizard/assign_master_components_test.js
@@ -211,6 +211,6 @@ describe('App.AssignMasterComponents', function () {
   });
 
   App.TestAliases.testAsComputedOr(baseObject.create(),
-    'nextButtonDisabled', ['App.router.btnClickInProgress', 'submitDisabled', 'validationInProgress']);
+    'nextButtonDisabled', ['App.router.btnClickInProgress', 'submitDisabled', 'validationInProgress', '!isLoaded']);
 
 });
\ No newline at end of file


[11/50] [abbrv] ambari git commit: AMBARI-22230 Hosts and Alerts tables UI tweaks. (atkach)

Posted by ao...@apache.org.
AMBARI-22230 Hosts and Alerts tables UI tweaks. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8f53bc45
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8f53bc45
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8f53bc45

Branch: refs/heads/branch-3.0-perf
Commit: 8f53bc453c47a4f95d5e58825feb77d69e6a6a22
Parents: a8ba5e6
Author: Andrii Tkach <at...@apache.org>
Authored: Fri Oct 13 13:45:06 2017 +0300
Committer: Andrii Tkach <at...@apache.org>
Committed: Fri Oct 13 13:45:06 2017 +0300

----------------------------------------------------------------------
 ambari-web/app/models/alerts/alert_group.js     |   2 +-
 ambari-web/app/styles/application.less          |   5 +
 ambari-web/app/styles/dashboard.less            |   6 +-
 .../app/styles/theme/bootstrap-ambari.css       |   3 +-
 ambari-web/app/templates/main/alerts.hbs        |   3 +-
 .../main/alerts/alert_definitions_actions.hbs   |   2 +-
 ambari-web/app/templates/main/dashboard.hbs     |   2 +-
 .../templates/main/dashboard/config_history.hbs |   2 +-
 ambari-web/app/templates/main/host.hbs          |   2 +-
 .../templates/main/host/bulk_operation_menu.hbs | 132 ++++++++++---------
 .../views/main/host/hosts_table_menu_view.js    |   4 +-
 .../app/views/main/host/stack_versions_view.js  |   2 +-
 .../test/models/alerts/alert_group_test.js      |   5 +-
 13 files changed, 93 insertions(+), 77 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8f53bc45/ambari-web/app/models/alerts/alert_group.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/alerts/alert_group.js b/ambari-web/app/models/alerts/alert_group.js
index 49a850f..3277fc8 100644
--- a/ambari-web/app/models/alerts/alert_group.js
+++ b/ambari-web/app/models/alerts/alert_group.js
@@ -55,7 +55,7 @@ App.AlertGroup = DS.Model.extend({
    * @type {string}
    */
   displayName: function () {
-    var name = App.config.truncateGroupName(this.get('name'));
+    var name = App.config.truncateGroupName(App.format.role(this.get('name'), true));
     return this.get('default') ? name + ' Default' : name;
   }.property('name', 'default'),
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/8f53bc45/ambari-web/app/styles/application.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/application.less b/ambari-web/app/styles/application.less
index 7c9cc8e..099f3cf 100644
--- a/ambari-web/app/styles/application.less
+++ b/ambari-web/app/styles/application.less
@@ -2802,4 +2802,9 @@ a.abort-icon:hover {
 .breadcrumbs-forward-slash {
   display: inline;
   color: #D2D3D5;
+}
+
+.container-wrap-table {
+  padding: 0 10px;
+  background-color: @diff-background-equal;
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/8f53bc45/ambari-web/app/styles/dashboard.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/dashboard.less b/ambari-web/app/styles/dashboard.less
index 02835bd..6deb30c 100644
--- a/ambari-web/app/styles/dashboard.less
+++ b/ambari-web/app/styles/dashboard.less
@@ -18,14 +18,12 @@
 
 @import 'common.less';
 
-#dashboard-widgets-container{
-  .tabs-left {
-    float: left;
-  }
+#dashboard-widgets-container {
   .btn-toolbar {
     float: right;
     padding-top: 5px;
     margin-bottom: 20px;
+    margin-top: -60px;
   }
   .dashboard-widgets-box {
     clear: both;

http://git-wip-us.apache.org/repos/asf/ambari/blob/8f53bc45/ambari-web/app/styles/theme/bootstrap-ambari.css
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/theme/bootstrap-ambari.css b/ambari-web/app/styles/theme/bootstrap-ambari.css
index 70579e7..cff73ad 100644
--- a/ambari-web/app/styles/theme/bootstrap-ambari.css
+++ b/ambari-web/app/styles/theme/bootstrap-ambari.css
@@ -479,11 +479,12 @@ h2.table-title {
 }
 .nav.nav-tabs li a .badge.badge-important {
   display: inline;
+  vertical-align: baseline;
 }
 .nav.nav-tabs li.active a {
   color: #333;
   border-bottom: 3px solid #3FAE2A;
-  padding-bottom: 1px;
+  padding-bottom: 2px;
 }
 .nav-tabs-left li,
 .nav-tabs-right li {

http://git-wip-us.apache.org/repos/asf/ambari/blob/8f53bc45/ambari-web/app/templates/main/alerts.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/alerts.hbs b/ambari-web/app/templates/main/alerts.hbs
index 40469d3..0f85e27 100644
--- a/ambari-web/app/templates/main/alerts.hbs
+++ b/ambari-web/app/templates/main/alerts.hbs
@@ -16,7 +16,7 @@
 * limitations under the License.
 }}
 
-<div id="alerts">
+<div id="alerts" class="container-wrap-table">
   <div class="row">
     <h2 class="table-title col-sm-1">{{t menu.item.alerts}}</h2>
     <div class="table-controls row col-sm-11 pull-right">
@@ -56,7 +56,6 @@
               {{view App.AlertDefinitionSummary contentBinding="alertDefinition"}}
             </td>
             <td class="alert-name">
-              <span {{bindAttr title="alertDefinition.type"}} {{bindAttr class=":type-icon  alertDefinition.typeIconClass"}}></span>
               <a href="#" {{action "gotoAlertDetails" alertDefinition}}>{{alertDefinition.label}}</a>
             </td>
             <td class="alert-service">{{alertDefinition.serviceDisplayName}}</td>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8f53bc45/ambari-web/app/templates/main/alerts/alert_definitions_actions.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/alerts/alert_definitions_actions.hbs b/ambari-web/app/templates/main/alerts/alert_definitions_actions.hbs
index 0604ac2..f65b000 100644
--- a/ambari-web/app/templates/main/alerts/alert_definitions_actions.hbs
+++ b/ambari-web/app/templates/main/alerts/alert_definitions_actions.hbs
@@ -16,7 +16,7 @@
 * limitations under the License.
 }}
 
-<button class="btn btn-default dropdown-toggle" data-toggle="dropdown" href="#">{{t common.actions}} <span class="caret"></span></button>
+<button class="btn btn-success dropdown-toggle" data-toggle="dropdown" href="#">{{t common.actions}} <span class="caret"></span></button>
 <ul class="dropdown-menu pull-left">
   {{#each action in controller}}
     <li>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8f53bc45/ambari-web/app/templates/main/dashboard.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/dashboard.hbs b/ambari-web/app/templates/main/dashboard.hbs
index 0226626..4b174cc 100644
--- a/ambari-web/app/templates/main/dashboard.hbs
+++ b/ambari-web/app/templates/main/dashboard.hbs
@@ -18,7 +18,7 @@
 
 <div class="row">
   <div class="summary-width col-md-12" id="dashboard-widgets-container">
-    <ul class="nav nav-tabs background-text tabs-left">
+    <ul class="nav nav-tabs background-text">
       {{#each category in view.categories}}
         {{#view view.NavItemView itemBinding="category.name" }}
           <a href="#" {{action "goToDashboardView" category.url}} >{{category.label}}</a>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8f53bc45/ambari-web/app/templates/main/dashboard/config_history.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/dashboard/config_history.hbs b/ambari-web/app/templates/main/dashboard/config_history.hbs
index 00f9c38..bc6ef7c 100644
--- a/ambari-web/app/templates/main/dashboard/config_history.hbs
+++ b/ambari-web/app/templates/main/dashboard/config_history.hbs
@@ -16,7 +16,7 @@
 * limitations under the License.
 }}
 
-<div id="config_history">
+<div id="config_history" class="container-wrap-table">
   <div class="row">
     <h2 class="table-title col-md-12">{{t dashboard.configHistory.title}}</h2>
   </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8f53bc45/ambari-web/app/templates/main/host.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/host.hbs b/ambari-web/app/templates/main/host.hbs
index ae37a0f..b373c40 100644
--- a/ambari-web/app/templates/main/host.hbs
+++ b/ambari-web/app/templates/main/host.hbs
@@ -16,7 +16,7 @@
 * limitations under the License.
 }}
 
-<div id="hosts">
+<div id="hosts" class="container-wrap-table">
 
   <div class="row">
     <h2 class="table-title col-sm-1">{{t common.hosts}}</h2>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8f53bc45/ambari-web/app/templates/main/host/bulk_operation_menu.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/host/bulk_operation_menu.hbs b/ambari-web/app/templates/main/host/bulk_operation_menu.hbs
index 64ab4e9..89551ca 100644
--- a/ambari-web/app/templates/main/host/bulk_operation_menu.hbs
+++ b/ambari-web/app/templates/main/host/bulk_operation_menu.hbs
@@ -16,7 +16,7 @@
 * limitations under the License.
 }}
 
-<button class="btn btn-default dropdown-toggle" data-toggle="dropdown" href="#">{{t common.actions}} <span class="caret"></span></button>
+<button class="btn btn-success dropdown-toggle" data-toggle="dropdown" href="#">{{t common.actions}} <span class="caret"></span></button>
 <ul class="dropdown-menu">
   {{#isAuthorized "HOST.ADD_DELETE_HOSTS"}}
     <li><a href="#" {{action addHost}}><i class="glyphicon glyphicon-plus glyphicon-white"></i> {{t hosts.host.add}}</a></li>
@@ -29,32 +29,36 @@
       <ul {{bindAttr class="view.parentView.showSelectedFilter::hidden :dropdown-menu"}}>
         {{#view view.hostItemView}}
           <a href="javascript:void(null);">{{view.label}}</a>
-          <ul class="dropdown-menu">
-            {{#each operation in view.operationsInfo}}
-              {{#if operation.label.length}}
-                {{#view view.operationView contentBinding="operation.operationData" selection="s"}}
-                  <a href="javascript:void(null);">{{operation.label}}</a>
-                {{/view}}
-              {{/if}}
-            {{/each}}
-          </ul>
-        {{/view}}
-        {{#each component in view.components}}
-          {{#view view.slaveItemView contentBinding="component"}}
-            <a href="javascript:void(null);">{{component.componentNameFormatted}}</a>
+          <div class="dropdown-menu-wrap">
             <ul class="dropdown-menu">
               {{#each operation in view.operationsInfo}}
-                {{#if operation.decommission}}
-                  {{#view view.advancedOperationView contentBinding="operation.operationData" selection="s"}}
-                    <a href="javascript:void(null);">{{operation.label}}</a>
-                  {{/view}}
-                {{else}}
-                  {{#view view.commonOperationView contentBinding="operation.operationData" selection="s"}}
+                {{#if operation.label.length}}
+                  {{#view view.operationView contentBinding="operation.operationData" selection="s"}}
                     <a href="javascript:void(null);">{{operation.label}}</a>
                   {{/view}}
                 {{/if}}
               {{/each}}
             </ul>
+          </div>
+        {{/view}}
+        {{#each component in view.components}}
+          {{#view view.slaveItemView contentBinding="component"}}
+            <a href="javascript:void(null);">{{component.componentNameFormatted}}</a>
+            <div class="dropdown-menu-wrap">
+              <ul class="dropdown-menu">
+                {{#each operation in view.operationsInfo}}
+                  {{#if operation.decommission}}
+                    {{#view view.advancedOperationView contentBinding="operation.operationData" selection="s"}}
+                      <a href="javascript:void(null);">{{operation.label}}</a>
+                    {{/view}}
+                  {{else}}
+                    {{#view view.commonOperationView contentBinding="operation.operationData" selection="s"}}
+                      <a href="javascript:void(null);">{{operation.label}}</a>
+                    {{/view}}
+                  {{/if}}
+                {{/each}}
+              </ul>
+            </div>
           {{/view}}
         {{/each}}
       </ul>
@@ -67,32 +71,36 @@
       <ul {{bindAttr class="view.parentView.hasFilteredItems::hidden :dropdown-menu"}}>
         {{#view view.hostItemView}}
           <a href="javascript:void(null);">{{view.label}}</a>
-          <ul class="dropdown-menu">
-            {{#each operation in view.operationsInfo}}
-              {{#if operation.label.length}}
-                {{#view view.operationView contentBinding="operation.operationData" selection="f"}}
-                  <a href="javascript:void(null);">{{operation.label}}</a>
-                {{/view}}
-              {{/if}}
-            {{/each}}
-          </ul>
-        {{/view}}
-        {{#each component in view.components}}
-          {{#view view.slaveItemView contentBinding="component"}}
-            <a href="javascript:void(null);">{{component.componentNameFormatted}}</a>
+          <div class="dropdown-menu-wrap">
             <ul class="dropdown-menu">
               {{#each operation in view.operationsInfo}}
-                {{#if operation.decommission}}
-                  {{#view view.advancedOperationView contentBinding="operation.operationData" selection="f"}}
-                    <a href="javascript:void(null);">{{operation.label}}</a>
-                  {{/view}}
-                {{else}}
-                  {{#view view.commonOperationView contentBinding="operation.operationData" selection="f"}}
+                {{#if operation.label.length}}
+                  {{#view view.operationView contentBinding="operation.operationData" selection="f"}}
                     <a href="javascript:void(null);">{{operation.label}}</a>
                   {{/view}}
                 {{/if}}
               {{/each}}
             </ul>
+          </div>
+        {{/view}}
+        {{#each component in view.components}}
+          {{#view view.slaveItemView contentBinding="component"}}
+            <a href="javascript:void(null);">{{component.componentNameFormatted}}</a>
+            <div class="dropdown-menu-wrap">
+              <ul class="dropdown-menu">
+                {{#each operation in view.operationsInfo}}
+                  {{#if operation.decommission}}
+                    {{#view view.advancedOperationView contentBinding="operation.operationData" selection="f"}}
+                      <a href="javascript:void(null);">{{operation.label}}</a>
+                    {{/view}}
+                  {{else}}
+                    {{#view view.commonOperationView contentBinding="operation.operationData" selection="f"}}
+                      <a href="javascript:void(null);">{{operation.label}}</a>
+                    {{/view}}
+                  {{/if}}
+                {{/each}}
+              </ul>
+            </div>
           {{/view}}
         {{/each}}
       </ul>
@@ -105,34 +113,38 @@
       <ul class="dropdown-menu">
         {{#view view.hostItemView}}
           <a href="javascript:void(null);">{{view.label}}</a>
-          <ul class="dropdown-menu">
-            {{#each operation in view.operationsInfo}}
-              {{#if operation.label.length}}
-                {{#view view.operationView contentBinding="operation.operationData" selection="a"}}
-                  <a href="javascript:void(null);">{{operation.label}}</a>
-                {{/view}}
-              {{/if}}
-            {{/each}}
-          </ul>
-        {{/view}}
-        {{#each component in view.components}}
-          {{#view view.slaveItemView contentBinding="component"}}
-            <a href="javascript:void(null);">{{component.componentNameFormatted}}</a>
+          <div class="dropdown-menu-wrap">
             <ul class="dropdown-menu">
               {{#each operation in view.operationsInfo}}
-                {{#if operation.decommission}}
-                  {{#view view.advancedOperationView contentBinding="operation.operationData" selection="a"}}
+                {{#if operation.label.length}}
+                  {{#view view.operationView contentBinding="operation.operationData" selection="a"}}
                     <a href="javascript:void(null);">{{operation.label}}</a>
                   {{/view}}
-                {{else}}
-                  {{#unless operation.delete}}
-                    {{#view view.commonOperationView contentBinding="operation.operationData" selection="a"}}
-                      <a href="javascript:void(null);">{{operation.label}}</a>
-                    {{/view}}
-                  {{/unless}}
                 {{/if}}
               {{/each}}
             </ul>
+          </div>
+        {{/view}}
+        {{#each component in view.components}}
+          {{#view view.slaveItemView contentBinding="component"}}
+            <a href="javascript:void(null);">{{component.componentNameFormatted}}</a>
+            <div class="dropdown-menu-wrap">
+              <ul class="dropdown-menu">
+                {{#each operation in view.operationsInfo}}
+                  {{#if operation.decommission}}
+                    {{#view view.advancedOperationView contentBinding="operation.operationData" selection="a"}}
+                      <a href="javascript:void(null);">{{operation.label}}</a>
+                    {{/view}}
+                  {{else}}
+                    {{#unless operation.delete}}
+                      {{#view view.commonOperationView contentBinding="operation.operationData" selection="a"}}
+                        <a href="javascript:void(null);">{{operation.label}}</a>
+                      {{/view}}
+                    {{/unless}}
+                  {{/if}}
+                {{/each}}
+              </ul>
+            </div>
           {{/view}}
         {{/each}}
       </ul>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8f53bc45/ambari-web/app/views/main/host/hosts_table_menu_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/host/hosts_table_menu_view.js b/ambari-web/app/views/main/host/hosts_table_menu_view.js
index 25b4052..08fd0d4 100644
--- a/ambari-web/app/views/main/host/hosts_table_menu_view.js
+++ b/ambari-web/app/views/main/host/hosts_table_menu_view.js
@@ -70,7 +70,7 @@ App.HostTableMenuView = Em.View.extend({
   slaveItemView: Em.View.extend({
 
     tagName: 'li',
-    classNames: ['dropdown-submenu'],
+    classNames: ['dropdown-submenu', 'submenu-left'],
 
     /**
      * Get third-level menu items ingo for slave components
@@ -240,7 +240,7 @@ App.HostTableMenuView = Em.View.extend({
   hostItemView: Em.View.extend({
 
     tagName: 'li',
-    classNames: ['dropdown-submenu'],
+    classNames: ['dropdown-submenu', 'submenu-left'],
 
     label: Em.I18n.t('common.hosts'),
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/8f53bc45/ambari-web/app/views/main/host/stack_versions_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/host/stack_versions_view.js b/ambari-web/app/views/main/host/stack_versions_view.js
index cebe8c7..62a88b4 100644
--- a/ambari-web/app/views/main/host/stack_versions_view.js
+++ b/ambari-web/app/views/main/host/stack_versions_view.js
@@ -22,7 +22,7 @@ var sort = require('views/common/sort_view');
 
 App.MainHostStackVersionsView = App.TableView.extend({
   templateName: require('templates/main/host/stack_versions'),
-  classNames: ['host-tab-content'],
+  classNames: ['host-tab-content', 'container-wrap-table'],
 
   /**
    * @type {Ember.Object}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8f53bc45/ambari-web/test/models/alerts/alert_group_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/alerts/alert_group_test.js b/ambari-web/test/models/alerts/alert_group_test.js
index 6f64b7a..34237d2 100644
--- a/ambari-web/test/models/alerts/alert_group_test.js
+++ b/ambari-web/test/models/alerts/alert_group_test.js
@@ -35,8 +35,9 @@ describe('App.AlertGroup', function() {
   describe('#displayName', function () {
 
     [
-      {name: 'abc', default: true, e: 'abc Default'},
-      {name: 'abc', default: false, e: 'abc'},
+      {name: 'abc', default: true, e: 'Abc Default'},
+      {name: 'abc', default: false, e: 'Abc'},
+      {name: 'ABC', default: false, e: 'Abc'},
       {name: '12345678901234567890', default: true, e: '123456789...234567890 Default'},
       {name: '12345678901234567890', default: false, e: '123456789...234567890'},
     ].forEach(function (test) {


[21/50] [abbrv] ambari git commit: AMBARI-22232 : Need to add a new property to support proxy users for Atlas service. (Vishal Suvagia via mugdha)

Posted by ao...@apache.org.
AMBARI-22232 : Need to add a new property to support proxy users for Atlas service. (Vishal Suvagia via mugdha)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/df108137
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/df108137
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/df108137

Branch: refs/heads/branch-3.0-perf
Commit: df1081378e51c309589540673e140faaf9329ee4
Parents: 499fec3
Author: Vishal Suvagia <vi...@yahoo.com>
Authored: Sat Oct 14 16:22:31 2017 +0530
Committer: Mugdha Varadkar <mu...@apache.org>
Committed: Mon Oct 16 11:15:23 2017 +0530

----------------------------------------------------------------------
 .../AtlasProxyUserConfigCalculation.java        |  48 +++++++++
 .../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml |   4 +
 .../stacks/HDP/2.5/upgrades/upgrade-2.6.xml     |   3 +
 .../configuration/application-properties.xml    |  15 +++
 .../stacks/HDP/2.6/services/stack_advisor.py    |   7 ++
 .../HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml |   4 +
 .../stacks/HDP/2.6/upgrades/upgrade-2.6.xml     |   3 +
 .../AtlasProxyUserConfigCalculationTest.java    | 108 +++++++++++++++++++
 .../stacks/2.6/common/test_stack_advisor.py     |   3 +-
 9 files changed, 194 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/df108137/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculation.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculation.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculation.java
new file mode 100644
index 0000000..85fb200
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculation.java
@@ -0,0 +1,48 @@
+package org.apache.ambari.server.serveraction.upgrades;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.serveraction.AbstractServerAction;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+
+import javax.inject.Inject;
+import java.text.MessageFormat;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+
+public class AtlasProxyUserConfigCalculation extends AbstractUpgradeServerAction {
+
+  private static final String ATLAS_APPLICATION_PROPERTIES_CONFIG_TYPE = "application-properties";
+  private static final String KNOX_ENV_CONFIG_TYPE = "knox-env";
+  private static final String KNOX_USER_CONFIG = "knox_user";
+
+  @Override
+  public CommandReport execute(ConcurrentMap<String, Object> requestSharedDataContext) throws AmbariException, InterruptedException {
+    String clusterName = getExecutionCommand().getClusterName();
+    Cluster cluster = getClusters().getCluster(clusterName);
+    String outputMessage = "";
+
+    Config atlasApplicationProperties = cluster.getDesiredConfigByType(ATLAS_APPLICATION_PROPERTIES_CONFIG_TYPE);
+    if (null == atlasApplicationProperties) {
+      return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
+        MessageFormat.format("Config type {0} not found, skipping updating property in same.", ATLAS_APPLICATION_PROPERTIES_CONFIG_TYPE), "");
+    }
+
+    Config knoxEnvConfig = cluster.getDesiredConfigByType(KNOX_ENV_CONFIG_TYPE);
+    String atlasProxyUsers = "knox";
+    if (null != knoxEnvConfig && knoxEnvConfig.getProperties().containsKey(KNOX_USER_CONFIG)) {
+      atlasProxyUsers = knoxEnvConfig.getProperties().get(KNOX_USER_CONFIG);
+    }
+
+    Map<String, String> currentAtlasApplicationProperties = atlasApplicationProperties.getProperties();
+    currentAtlasApplicationProperties.put("atlas.proxyusers", atlasProxyUsers);
+    atlasApplicationProperties.setProperties(currentAtlasApplicationProperties);
+    atlasApplicationProperties.save();
+
+    outputMessage = outputMessage + MessageFormat.format("Successfully updated {0} config type.\n", ATLAS_APPLICATION_PROPERTIES_CONFIG_TYPE);
+    return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", outputMessage, "");
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/df108137/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
index 1af3b18..cd69a9c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
@@ -488,6 +488,10 @@
         <task xsi:type="configure" id="atlas_log4j_update_logger_settings"/>
       </execute-stage>
 
+      <execute-stage service="ATLAS" component="ATLAS_SERVER" title="Applying Atlas proxy-user configurations.">
+        <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.AtlasProxyUserConfigCalculation"/>
+      </execute-stage>
+
       <!--KAFKA-->
       <execute-stage service="KAFKA" component="KAFKA_BROKER" title="Parameterizing Kafka Log4J Properties">
         <task xsi:type="configure" id="kafka_log4j_parameterize">

http://git-wip-us.apache.org/repos/asf/ambari/blob/df108137/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
index 0a01a04..a6d3f29 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
@@ -1123,6 +1123,9 @@
           <task xsi:type="configure" id="atlas_env_gc_worker"/>
           <task xsi:type="configure" id="hdp_2_6_atlas_kafka_auto_commit_enable_property_delete"/>
           <task xsi:type="configure" id="atlas_log4j_update_logger_settings"/>
+          <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.AtlasProxyUserConfigCalculation" >
+            <summary>Applying Atlas proxy-user configurations.</summary>
+          </task>
         </pre-upgrade>
         <pre-downgrade />
         <upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/df108137/ambari-server/src/main/resources/stacks/HDP/2.6/services/ATLAS/configuration/application-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/ATLAS/configuration/application-properties.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/ATLAS/configuration/application-properties.xml
index c271dc3..523cb21 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/ATLAS/configuration/application-properties.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/ATLAS/configuration/application-properties.xml
@@ -109,4 +109,19 @@
     <deleted>true</deleted>
     <on-ambari-upgrade add="false"/>
   </property>
+  <property>
+    <name>atlas.proxyusers</name>
+    <value/>
+    <description>Proxy users for Atlas</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <depends-on>
+      <property>
+        <type>knox-env</type>
+        <name>knox_user</name>
+      </property>
+    </depends-on>
+    <on-ambari-upgrade add="false"/>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/df108137/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
index 94f28db..fc12d37 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
@@ -83,6 +83,13 @@ class HDP26StackAdvisor(HDP25StackAdvisor):
         knox_port = services['configurations']["gateway-site"]["properties"]['gateway.port']
       putAtlasApplicationProperty('atlas.sso.knox.providerurl', 'https://{0}:{1}/gateway/knoxsso/api/v1/websso'.format(knox_host, knox_port))
 
+    knox_service_user = ''
+    if 'KNOX' in servicesList and 'knox-env' in services['configurations']:
+      knox_service_user = services['configurations']['knox-env']['properties']['knox_user']
+    else:
+      knox_service_user = 'knox'
+    putAtlasApplicationProperty('atlas.proxyusers',knox_service_user)
+
   def recommendDruidConfigurations(self, configurations, clusterData, services, hosts):
 
       # druid is not in list of services to be installed

http://git-wip-us.apache.org/repos/asf/ambari/blob/df108137/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
index ebb81d9..832c505 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
@@ -376,6 +376,10 @@
         <task xsi:type="configure" id="atlas_hbase_conf_dir"/>
       </execute-stage>
 
+      <execute-stage service="ATLAS" component="ATLAS_SERVER" title="Applying Atlas proxy-user configurations.">
+        <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.AtlasProxyUserConfigCalculation"/>
+      </execute-stage>
+
       <!-- KMS -->
       <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Apply config changes for Ranger Kms plugin">
         <task xsi:type="configure" id="hdp_2_6_maint_ranger_kms_plugin_cluster_name"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/df108137/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
index ae5972e..d0e11a1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
@@ -1050,6 +1050,9 @@
           <task xsi:type="configure" id="hdp_2_6_atlas_kafka_auto_commit_enable_property_delete"/>
           <task xsi:type="configure" id="atlas_log4j_update_logger_settings"/>
           <task xsi:type="configure" id="atlas_hbase_conf_dir"/>
+          <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.AtlasProxyUserConfigCalculation" >
+            <summary>Applying Atlas proxy-user configurations.</summary>
+          </task>
         </pre-upgrade>
         <pre-downgrade/> <!--  no-op to prevent config changes on downgrade -->
         <upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/df108137/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculationTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculationTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculationTest.java
new file mode 100644
index 0000000..3f8bca9
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculationTest.java
@@ -0,0 +1,108 @@
+package org.apache.ambari.server.serveraction.upgrades;
+
+import com.google.inject.Injector;
+import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.agent.ExecutionCommand;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.easymock.EasyMock;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.easymock.EasyMock.*;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+public class AtlasProxyUserConfigCalculationTest {
+
+  private Injector m_injector;
+  private Clusters m_clusters;
+  private Field m_clusterField;
+
+  @Before
+  public void setup() throws Exception {
+    m_injector = createMock(Injector.class);
+    m_clusters = createMock(Clusters.class);
+    Cluster cluster = createMock(Cluster.class);
+
+    Map<String, String> mockKnoxEnvProperties = new HashMap<String, String>() {
+      {
+        put("knox_user", "knox_cstm");
+      }
+    };
+
+    Map<String, String> mockAtlasApplicationProperties = new HashMap<String, String>();
+
+    Config knoxEnvConfig = createMock(Config.class);
+    expect(knoxEnvConfig.getType()).andReturn("knox-env").anyTimes();
+    expect(knoxEnvConfig.getProperties()).andReturn(mockKnoxEnvProperties).anyTimes();
+
+
+    Config atlasApplicationPropertiesConfig = createMock(Config.class);
+    expect(atlasApplicationPropertiesConfig.getType()).andReturn("application-properties").anyTimes();
+    expect(atlasApplicationPropertiesConfig.getProperties()).andReturn(mockAtlasApplicationProperties).anyTimes();
+
+
+    atlasApplicationPropertiesConfig.setProperties(anyObject(Map.class));
+    expectLastCall().atLeastOnce();
+
+    atlasApplicationPropertiesConfig.save();
+    expectLastCall().atLeastOnce();
+
+    expect(cluster.getDesiredConfigByType("knox-env")).andReturn(knoxEnvConfig).atLeastOnce();
+    expect(cluster.getDesiredConfigByType("application-properties")).andReturn(atlasApplicationPropertiesConfig).atLeastOnce();
+    expect(m_clusters.getCluster((String) anyObject())).andReturn(cluster).anyTimes();
+    expect(m_injector.getInstance(Clusters.class)).andReturn(m_clusters).atLeastOnce();
+
+    replay(m_injector, m_clusters, cluster, knoxEnvConfig, atlasApplicationPropertiesConfig);
+
+    m_clusterField = AbstractUpgradeServerAction.class.getDeclaredField("m_clusters");
+    m_clusterField.setAccessible(true);
+
+  }
+
+  @Test
+  public void testAction() throws Exception {
+
+    Map<String, String> commandParams = new HashMap<String, String>();
+    commandParams.put("clusterName", "cl1");
+
+    ExecutionCommand executionCommand = new ExecutionCommand();
+    executionCommand.setCommandParams(commandParams);
+    executionCommand.setClusterName("cl1");
+
+    HostRoleCommand hrc = createMock(HostRoleCommand.class);
+    expect(hrc.getRequestId()).andReturn(1L).anyTimes();
+    expect(hrc.getStageId()).andReturn(2L).anyTimes();
+    expect(hrc.getExecutionCommandWrapper()).andReturn(new ExecutionCommandWrapper(executionCommand)).anyTimes();
+    replay(hrc);
+
+    AtlasProxyUserConfigCalculation action = new AtlasProxyUserConfigCalculation();
+    m_clusterField.set(action, m_clusters);
+    action.setExecutionCommand(executionCommand);
+    action.setHostRoleCommand(hrc);
+
+    CommandReport report = action.execute(null);
+    Assert.assertNotNull(report);
+
+    Cluster cl = m_clusters.getCluster("cl1");
+    Config config = cl.getDesiredConfigByType("application-properties");
+    Map<String, String> map = config.getProperties();
+
+    Assert.assertTrue(map.containsKey("atlas.proxyusers"));
+    Assert.assertEquals("knox_cstm", map.get("atlas.proxyusers"));
+
+    report = action.execute(null);
+    Assert.assertNotNull(report);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/df108137/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
index ade08c1..f4c5508 100644
--- a/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
@@ -1303,7 +1303,8 @@ class TestHDP26StackAdvisor(TestCase):
           "atlas.graph.storage.hostname": "",
           "atlas.kafka.bootstrap.servers": "",
           "atlas.kafka.zookeeper.connect": "",
-          "atlas.authorizer.impl": "simple"
+          "atlas.authorizer.impl": "simple",
+          'atlas.proxyusers': 'knox'
         }
       },
       "infra-solr-env": {


[12/50] [abbrv] ambari git commit: AMBARI-22231 Stopping and starting HDFS component in Ambari will result in Namenode to stay in SAFEMODE (dsen)

Posted by ao...@apache.org.
AMBARI-22231 Stopping and starting HDFS component in Ambari will result in Namenode to stay in SAFEMODE (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3a0d168c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3a0d168c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3a0d168c

Branch: refs/heads/branch-3.0-perf
Commit: 3a0d168c08edf559bc91f3eb76485797f83a6e48
Parents: 8f53bc4
Author: Dmytro Sen <ds...@apache.org>
Authored: Fri Oct 13 15:53:09 2017 +0300
Committer: Dmytro Sen <ds...@apache.org>
Committed: Fri Oct 13 15:53:09 2017 +0300

----------------------------------------------------------------------
 .../src/main/resources/stacks/HDP/3.0/role_command_order.json      | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3a0d168c/ambari-server/src/main/resources/stacks/HDP/3.0/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/3.0/role_command_order.json
index 576910f..223eee5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/role_command_order.json
@@ -87,7 +87,7 @@
     "PXF_SERVICE_CHECK-SERVICE_CHECK" : ["PXF-START", "HDFS_SERVICE_CHECK-SERVICE_CHECK", "HBASE_SERVICE_CHECK-SERVICE_CHECK", "HIVE_SERVICE_CHECK-SERVICE_CHECK"],
 
     "RANGER_ADMIN-START": ["ZOOKEEPER_SERVER-START", "INFRA_SOLR-START"],
-    "RANGER_USERSYNC-START" : ["RANGER_ADMIN-START", "RANGER_KMS_SERVER-START"],
+    "RANGER_USERSYNC-START" : ["RANGER_ADMIN-START"],
     "RANGER_KMS_SERVER-START" : ["RANGER_ADMIN-START"],
     "RANGER_KMS_SERVICE_CHECK-SERVICE_CHECK" : ["RANGER_KMS_SERVER-START"],
     "RANGER_SERVICE_CHECK-SERVICE_CHECK" : ["RANGER_USERSYNC-START", "RANGER_ADMIN-START"],


[30/50] [abbrv] ambari git commit: AMBARI-22245 - YARN Service Checks Fails Because of Old hadoop-client Classpath Entry (part2) (jonathanhurley)

Posted by ao...@apache.org.
AMBARI-22245 - YARN Service Checks Fails Because of Old hadoop-client Classpath Entry (part2) (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/79d4ac22
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/79d4ac22
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/79d4ac22

Branch: refs/heads/branch-3.0-perf
Commit: 79d4ac220d41a6bd7c5e77b34ecc230f439cd968
Parents: 75102dc
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon Oct 16 20:06:40 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon Oct 16 20:06:59 2017 -0400

----------------------------------------------------------------------
 .../YARN/3.0.0.3.0/package/scripts/status_params.py       | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/79d4ac22/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/status_params.py
index 6bb528f..d2d1307 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/status_params.py
@@ -33,11 +33,11 @@ mapred_pid_dir_prefix = config['configurations']['mapred-env']['mapred_pid_dir_p
 yarn_pid_dir = format("{yarn_pid_dir_prefix}/{yarn_user}")
 mapred_pid_dir = format("{mapred_pid_dir_prefix}/{mapred_user}")
 
-resourcemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-resourcemanager.pid")
-nodemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-nodemanager.pid")
-yarn_historyserver_pid_file_old = format("{yarn_pid_dir}/yarn-{yarn_user}-historyserver.pid")
-yarn_historyserver_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-timelineserver.pid")  # *-historyserver.pid is deprecated
-mapred_historyserver_pid_file = format("{mapred_pid_dir}/mapred-{mapred_user}-historyserver.pid")
+resourcemanager_pid_file = format("{yarn_pid_dir}/hadoop-{yarn_user}-resourcemanager.pid")
+nodemanager_pid_file = format("{yarn_pid_dir}/hadoop-{yarn_user}-nodemanager.pid")
+yarn_historyserver_pid_file_old = format("{yarn_pid_dir}/hadoop-{yarn_user}-historyserver.pid")
+yarn_historyserver_pid_file = format("{yarn_pid_dir}/hadoop-{yarn_user}-timelineserver.pid")  # *-historyserver.pid is deprecated
+mapred_historyserver_pid_file = format("{mapred_pid_dir}/hadoop-{mapred_user}-historyserver.pid")
 
 hadoop_home = stack_select.get_hadoop_dir("home")
 hadoop_conf_dir = functions.conf_select.get_hadoop_conf_dir()


[39/50] [abbrv] ambari git commit: AMBARI-22250. Upgrade option not showing in stack versions. (Ishan Bhatt via yusaku)

Posted by ao...@apache.org.
AMBARI-22250. Upgrade option not showing in stack versions. (Ishan Bhatt via yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ee509e08
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ee509e08
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ee509e08

Branch: refs/heads/branch-3.0-perf
Commit: ee509e08fa25545953d78450997b3a47bf68f904
Parents: 881e15d
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Tue Oct 17 15:41:20 2017 -0700
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Tue Oct 17 15:41:20 2017 -0700

----------------------------------------------------------------------
 .../views/main/admin/stack_upgrade/upgrade_version_box_view.js   | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ee509e08/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
index 355ad88..c0786a3 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
@@ -279,9 +279,9 @@ App.UpgradeVersionBoxView = Em.View.extend({
         default:
           var isVersionColumnView = this.get('isVersionColumnView');
           var stackServices = this.get('content.stackServices');
-          var isUpgradable = stackServices && stackServices.some( function(stackService){
+          var isUpgradable = stackServices && (this.get('content.isStandard') || stackServices.some( function(stackService){
               return stackService.get('isUpgradable');
-          });
+          }));
           var isPatch = this.get('content.isPatch');
           var isMaint = this.get('content.isMaint');
 


[25/50] [abbrv] ambari git commit: AMBARI-22233. Zeppelin service check failed during EU from 2.5 to 2.6 as ZeppelinServer cannot be instantiated (Prabhjyot Singh via Venkata Sairam)

Posted by ao...@apache.org.
AMBARI-22233. Zeppelin service check failed during EU from 2.5 to 2.6 as ZeppelinServer cannot be instantiated (Prabhjyot Singh via Venkata Sairam)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0317cf71
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0317cf71
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0317cf71

Branch: refs/heads/branch-3.0-perf
Commit: 0317cf7163165f4a6f90b2337bf97382679dfee4
Parents: 2a8ac0d
Author: Venkata Sairam <ve...@gmail.com>
Authored: Mon Oct 16 18:20:06 2017 +0530
Committer: Venkata Sairam <ve...@gmail.com>
Committed: Mon Oct 16 18:20:06 2017 +0530

----------------------------------------------------------------------
 .../ZEPPELIN/0.7.0/package/scripts/master.py    | 40 +++++++++++++++++---
 .../stacks/2.6/ZEPPELIN/test_zeppelin_070.py    |  3 ++
 2 files changed, 37 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0317cf71/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
index 9d179b8..6ccdfba 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
@@ -192,7 +192,7 @@ class Master(Script):
       notebook_directory = "/user/" + format("{zeppelin_user}") + "/" + \
                            params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir']
 
-    if not self.is_path_exists_in_HDFS(notebook_directory, params.zeppelin_user):
+    if not self.is_directory_exists_in_HDFS(notebook_directory, params.zeppelin_user):
       # hdfs dfs -mkdir {notebook_directory}
       params.HdfsResource(format("{notebook_directory}"),
                           type="directory",
@@ -243,7 +243,7 @@ class Master(Script):
       self.create_zeppelin_dir(params)
 
     if params.conf_stored_in_hdfs:
-      if not self.is_path_exists_in_HDFS(self.get_zeppelin_conf_FS_directory(params), params.zeppelin_user):
+      if not self.is_directory_exists_in_HDFS(self.get_zeppelin_conf_FS_directory(params), params.zeppelin_user):
         # hdfs dfs -mkdir {zeppelin's conf directory}
         params.HdfsResource(self.get_zeppelin_conf_FS_directory(params),
                             type="directory",
@@ -314,15 +314,17 @@ class Master(Script):
   def get_zeppelin_conf_FS(self, params):
     return self.get_zeppelin_conf_FS_directory(params) + "/interpreter.json"
 
-  def is_path_exists_in_HDFS(self, path, as_user):
+  def is_directory_exists_in_HDFS(self, path, as_user):
     kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
     kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};")
-    path_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -e {path};echo $?"),
+
+    #-d: if the path is a directory, return 0.
+    path_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -d {path};echo $?"),
                              user=as_user)[1]
 
     # if there is no kerberos setup then the string will contain "-bash: kinit: command not found"
     if "\n" in path_exists:
-      path_exists = path_exists.split("\n")[1]
+      path_exists = path_exists.split("\n").pop()
 
     # '1' means it does not exists
     if path_exists == '0':
@@ -330,6 +332,31 @@ class Master(Script):
     else:
       return False
 
+  def is_file_exists_in_HDFS(self, path, as_user):
+    kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+    kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};")
+
+    #-f: if the path is a file, return 0.
+    path_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -f {path};echo $?"),
+                             user=as_user)[1]
+
+    # if there is no kerberos setup then the string will contain "-bash: kinit: command not found"
+    if "\n" in path_exists:
+      path_exists = path_exists.split("\n").pop()
+
+    # '1' means it does not exists
+    if path_exists == '0':
+      #-z: if the file is zero length, return 0.
+      path_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -z {path};echo $?"),
+                               user=as_user)[1]
+
+      if "\n" in path_exists:
+        path_exists = path_exists.split("\n").pop()
+      if path_exists != '0':
+        return True
+
+    return False
+
   def get_interpreter_settings(self):
     import params
     import json
@@ -338,7 +365,7 @@ class Master(Script):
     if params.conf_stored_in_hdfs:
       zeppelin_conf_fs = self.get_zeppelin_conf_FS(params)
 
-      if self.is_path_exists_in_HDFS(zeppelin_conf_fs, params.zeppelin_user):
+      if self.is_file_exists_in_HDFS(zeppelin_conf_fs, params.zeppelin_user):
         # copy from hdfs to /etc/zeppelin/conf/interpreter.json
         kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths',None))
         kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};")
@@ -358,6 +385,7 @@ class Master(Script):
     File(interpreter_config,
          group=params.zeppelin_group,
          owner=params.zeppelin_user,
+         mode=0644,
          content=json.dumps(config_data, indent=2))
 
     if params.conf_stored_in_hdfs:

http://git-wip-us.apache.org/repos/asf/ambari/blob/0317cf71/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
index 400350c..f5027b2 100644
--- a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
+++ b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
@@ -364,6 +364,7 @@ class TestZeppelin070(RMFTestCase):
                           content=interpreter_json_generated.template_after_base,
                           owner='zeppelin',
                           group='zeppelin',
+                          mode=0644
                           )
 
 
@@ -392,6 +393,7 @@ class TestZeppelin070(RMFTestCase):
                               content=interpreter_json_generated.template_after_without_spark_and_livy,
                               owner='zeppelin',
                               group='zeppelin',
+                              mode=0644
                               )
 
     self.assertResourceCalled('HdfsResource',
@@ -419,6 +421,7 @@ class TestZeppelin070(RMFTestCase):
                               content=interpreter_json_generated.template_after_kerberos,
                               owner='zeppelin',
                               group='zeppelin',
+                              mode=0644
                               )
 
     self.assertResourceCalled('HdfsResource', 'hdfs:///user/zeppelin/conf/interpreter.json',


[44/50] [abbrv] ambari git commit: AMBARI-22258. Use correct property attribute setter fn(). while setting 'tez.runtime.io.sort.mb' maximum value.

Posted by ao...@apache.org.
AMBARI-22258. Use correct property attribute setter fn(). while setting 'tez.runtime.io.sort.mb' maximum value.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3b5cbed0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3b5cbed0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3b5cbed0

Branch: refs/heads/branch-3.0-perf
Commit: 3b5cbed0983ef3e47b6a948dcbeeef0ac5d6a604
Parents: 37d59f2
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Wed Oct 18 13:05:40 2017 -0700
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Wed Oct 18 13:05:40 2017 -0700

----------------------------------------------------------------------
 .../resources/common-services/YARN/3.0.0.3.0/service_advisor.py | 3 ++-
 .../src/main/resources/stacks/HDP/2.5/services/stack_advisor.py | 3 ++-
 .../src/test/python/stacks/2.5/common/test_stack_advisor.py     | 5 ++++-
 3 files changed, 8 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3b5cbed0/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
index ecf245d..70eb5be 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
@@ -580,6 +580,7 @@ class YARNRecommender(service_advisor.ServiceAdvisor):
     putHiveInteractiveEnvProperty = self.putProperty(configurations, "hive-interactive-env", services)
     putHiveInteractiveEnvPropertyAttribute = self.putPropertyAttribute(configurations, "hive-interactive-env")
     putTezInteractiveSiteProperty = self.putProperty(configurations, "tez-interactive-site", services)
+    putTezInteractiveSitePropertyAttribute = self.putPropertyAttribute(configurations, "tez-interactive-site")
     llap_daemon_selected_queue_name = None
     selected_queue_is_ambari_managed_llap = None  # Queue named 'llap' at root level is Ambari managed.
     llap_selected_queue_am_percent = None
@@ -1047,7 +1048,7 @@ class YARNRecommender(service_advisor.ServiceAdvisor):
     putTezInteractiveSiteProperty('tez.runtime.io.sort.mb', tez_runtime_io_sort_mb)
     if "tez-site" in services["configurations"] and "tez.runtime.sorter.class" in services["configurations"]["tez-site"]["properties"]:
       if services["configurations"]["tez-site"]["properties"]["tez.runtime.sorter.class"] == "LEGACY":
-        putTezInteractiveSiteProperty("tez.runtime.io.sort.mb", "maximum", 1800)
+        putTezInteractiveSitePropertyAttribute("tez.runtime.io.sort.mb", "maximum", 1800)
 
     putTezInteractiveSiteProperty('tez.runtime.unordered.output.buffer.size-mb', tez_runtime_unordered_output_buffer_size)
     putHiveInteractiveSiteProperty('hive.auto.convert.join.noconditionaltask.size', hive_auto_convert_join_noconditionaltask_size)

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b5cbed0/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index 92ce9b9..b6f2478 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -860,6 +860,7 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
     putHiveInteractiveEnvProperty = self.putProperty(configurations, "hive-interactive-env", services)
     putHiveInteractiveEnvPropertyAttribute = self.putPropertyAttribute(configurations, "hive-interactive-env")
     putTezInteractiveSiteProperty = self.putProperty(configurations, "tez-interactive-site", services)
+    putTezInteractiveSitePropertyAttribute = self.putPropertyAttribute(configurations, "tez-interactive-site")
     llap_daemon_selected_queue_name = None
     selected_queue_is_ambari_managed_llap = None  # Queue named 'llap' at root level is Ambari managed.
     llap_selected_queue_am_percent = None
@@ -1331,7 +1332,7 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
     putTezInteractiveSiteProperty('tez.runtime.io.sort.mb', tez_runtime_io_sort_mb)
     if "tez-site" in services["configurations"] and "tez.runtime.sorter.class" in services["configurations"]["tez-site"]["properties"]:
       if services["configurations"]["tez-site"]["properties"]["tez.runtime.sorter.class"] == "LEGACY":
-        putTezInteractiveSiteProperty("tez.runtime.io.sort.mb", "maximum", 1800)
+        putTezInteractiveSitePropertyAttribute("tez.runtime.io.sort.mb", "maximum", 1800)
 
     putTezInteractiveSiteProperty('tez.runtime.unordered.output.buffer.size-mb', tez_runtime_unordered_output_buffer_size)
     putHiveInteractiveSiteProperty('hive.auto.convert.join.noconditionaltask.size', hive_auto_convert_join_noconditionaltask_size)

http://git-wip-us.apache.org/repos/asf/ambari/blob/3b5cbed0/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
index cf462de..407e78d 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
@@ -4446,7 +4446,8 @@ class TestHDP25StackAdvisor(TestCase):
           },
         "tez-site": {
           "properties": {
-            "tez.am.resource.memory.mb": "1024"
+            "tez.am.resource.memory.mb": "1024",
+            "tez.runtime.sorter.class": "LEGACY"
           }
         },
       }
@@ -4481,6 +4482,8 @@ class TestHDP25StackAdvisor(TestCase):
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.io.memory.size'], '186368')
     self.assertEqual(configurations['hive-interactive-env']['properties']['llap_heap_size'], '9830')
+    self.assertEqual(configurations['tez-interactive-site']['properties']['tez.runtime.io.sort.mb'], '1092')
+    self.assertEquals(configurations['tez-interactive-site']['property_attributes']['tez.runtime.io.sort.mb'], {'maximum': '1800'})
 
 
 


[17/50] [abbrv] ambari git commit: AMBARI-22241: accumulo_script.py doesn't override init (Josh Elser via jluniya)

Posted by ao...@apache.org.
AMBARI-22241: accumulo_script.py doesn't override init (Josh Elser via jluniya)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/32906779
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/32906779
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/32906779

Branch: refs/heads/branch-3.0-perf
Commit: 329067797ecf70b765f6cd5a9febf89dcfbac0a5
Parents: 62a0a18
Author: Jayush Luniya <jl...@hortonworks.com>
Authored: Fri Oct 13 21:15:50 2017 -0700
Committer: Jayush Luniya <jl...@hortonworks.com>
Committed: Fri Oct 13 21:15:50 2017 -0700

----------------------------------------------------------------------
 .../ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py         | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/32906779/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
index 6aafb05..2a95820 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
@@ -37,6 +37,7 @@ from accumulo_service import accumulo_service
 class AccumuloScript(Script):
 
   def __init__(self, component):
+    Script.__init__(self)
     self.component = component
 
 


[34/50] [abbrv] ambari git commit: AMBARI-22255. Make the install modal full screen (alexantonenko)

Posted by ao...@apache.org.
AMBARI-22255. Make the install modal full screen (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/448d6a8b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/448d6a8b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/448d6a8b

Branch: refs/heads/branch-3.0-perf
Commit: 448d6a8b235583a20206b790bee8170de32f27b7
Parents: ce29f8c
Author: Alex Antonenko <aa...@hortonworks.com>
Authored: Tue Oct 17 20:26:49 2017 +0300
Committer: Alex Antonenko <aa...@hortonworks.com>
Committed: Tue Oct 17 20:26:49 2017 +0300

----------------------------------------------------------------------
 ambari-web/app/messages.js             |  1 +
 ambari-web/app/routes/installer.js     |  3 +--
 ambari-web/app/routes/main.js          |  1 +
 ambari-web/app/styles/application.less |  3 +--
 ambari-web/app/styles/common.less      |  2 +-
 ambari-web/app/styles/wizard.less      | 24 +++++++++++++++++++++++-
 ambari-web/app/templates/installer.hbs |  2 +-
 7 files changed, 29 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/448d6a8b/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 3c4f038..0123356 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -21,6 +21,7 @@ Em.I18n.translations = {
   'app.name':'Ambari',
   'app.name.subtitle':'Ambari - {0}',
   'app.name.subtitle.experimental':'Ambari Experimental',
+  'app.name.subtitle.installer':'Cluster Install Wizard',
   'app.reloadPopup.link': 'Reload Page',
   'app.reloadPopup.text': 'Trying to connect to server...',
   'app.reloadPopup.noClusterName.text': 'Failed to retrieve cluster name, trying to reload...',

http://git-wip-us.apache.org/repos/asf/ambari/blob/448d6a8b/ambari-web/app/routes/installer.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/installer.js b/ambari-web/app/routes/installer.js
index ec8f1b9..f1f6659 100644
--- a/ambari-web/app/routes/installer.js
+++ b/ambari-web/app/routes/installer.js
@@ -35,8 +35,7 @@ module.exports = Em.Route.extend(App.RouterRedirections, {
           // check server/web client versions match
           App.router.get('installerController').checkServerClientVersion().done(function () {
 
-            var name = 'Cluster Install Wizard';
-            $('title').text('Ambari - ' + name);
+            $('title').text(Em.I18n.t('app.name.subtitle').format(Em.I18n.t('app.name.subtitle.installer')));
             $('#main').addClass('install-wizard-content');
 
             App.router.get('mainViewsController').loadAmbariViews();

http://git-wip-us.apache.org/repos/asf/ambari/blob/448d6a8b/ambari-web/app/routes/main.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/main.js b/ambari-web/app/routes/main.js
index 7ed18de..d1f9012 100644
--- a/ambari-web/app/routes/main.js
+++ b/ambari-web/app/routes/main.js
@@ -40,6 +40,7 @@ module.exports = Em.Route.extend(App.RouterRedirections, {
           clusterController.loadAmbariProperties().complete(function () {
             App.router.get('mainViewsController').loadAmbariViews();
             clusterController.loadClusterName(false).done(function () {
+              $('#main').removeClass('install-wizard-content');
               if (App.get('testMode')) {
                 router.get('mainController').initialize();
               } else {

http://git-wip-us.apache.org/repos/asf/ambari/blob/448d6a8b/ambari-web/app/styles/application.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/application.less b/ambari-web/app/styles/application.less
index 099f3cf..d40ab8e 100644
--- a/ambari-web/app/styles/application.less
+++ b/ambari-web/app/styles/application.less
@@ -80,7 +80,6 @@ footer {
   color: #666;
   margin-top: -1 * @footer-height - 1;
   height: @footer-height - 60px;
-  margin-left: @side-navigation-width;
 }
 
 .editable-list-container {
@@ -2807,4 +2806,4 @@ a.abort-icon:hover {
 .container-wrap-table {
   padding: 0 10px;
   background-color: @diff-background-equal;
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/448d6a8b/ambari-web/app/styles/common.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/common.less b/ambari-web/app/styles/common.less
index 7753a59..7202342 100644
--- a/ambari-web/app/styles/common.less
+++ b/ambari-web/app/styles/common.less
@@ -186,7 +186,7 @@
 }
 
 @footer-height: 100px;
-@side-navigation-width: 190px;
+@side-navigation-width: 230px;
 
 @default-font-size: 14px;
 @smaller-font-size: 12px;

http://git-wip-us.apache.org/repos/asf/ambari/blob/448d6a8b/ambari-web/app/styles/wizard.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/wizard.less b/ambari-web/app/styles/wizard.less
index 5f60378..cb9eb92 100644
--- a/ambari-web/app/styles/wizard.less
+++ b/ambari-web/app/styles/wizard.less
@@ -659,6 +659,28 @@
   }
 }
 
+.install-wizard-content {
+  #top-nav .navbar.navbar-static-top {
+    margin-bottom: 0;
+  }
+  .navbar-static-top > .main-container,
+  >.main-container {
+    width: 100%;
+  }
+  >.main-container {
+    padding: 0;
+    .wizard {
+      border: none;
+      .wizard-nav {
+        width: 250px;
+      }
+      .wizard-content.col-md-9 {
+        width: calc(~"100% - 250px");
+      }
+    }
+  }
+}
+
 @media all and (max-width: 2560px) {
    .wizard-content {
     #serviceConfig {
@@ -675,4 +697,4 @@
       }
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/448d6a8b/ambari-web/app/templates/installer.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/installer.hbs b/ambari-web/app/templates/installer.hbs
index f63664a..73ecff4 100644
--- a/ambari-web/app/templates/installer.hbs
+++ b/ambari-web/app/templates/installer.hbs
@@ -23,7 +23,7 @@
 {{else}}
   <div class="wizard">
     <div class="wizard-body row">
-      <div class="wizard-nav col-md-3">
+      <div class="wizard-nav">
         <ul class="nav nav-pills nav-stacked" {{QAAttr "wizard-nav"}}>
           <li {{QAAttr "wizard-nav-step"}} {{bindAttr class="isStep0:active view.isStep0Disabled:disabled view.isStep0Completed:completed"}}><a href="javascript:void(null);"  {{action gotoStep0 target="controller"}}><i class="step-marker"><span class="step-index">0</span></i><p class="step-name">{{t installer.step0.header}}</p></a></li>
           <li {{QAAttr "wizard-nav-step"}} {{bindAttr class="isStep1:active view.isStep1Disabled:disabled view.isStep1Completed:completed"}}><a href="javascript:void(null);"  {{action gotoStep1 target="controller"}}><i class="step-marker"><span class="step-index">1</span></i><p class="step-name">{{t installer.step1.header}}</p></a></li>


[09/50] [abbrv] ambari git commit: AMBARI-22224. host and hostname built-in variables are not populated when performing Kerberos-related operations (rlevas)

Posted by ao...@apache.org.
AMBARI-22224. host and hostname built-in variables are not populated when performing Kerberos-related operations (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0f76c7f9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0f76c7f9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0f76c7f9

Branch: refs/heads/branch-3.0-perf
Commit: 0f76c7f9d642bced5c89e0a25986c12c89213912
Parents: d88d3cc
Author: Robert Levas <rl...@hortonworks.com>
Authored: Thu Oct 12 15:49:57 2017 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Thu Oct 12 15:49:57 2017 -0400

----------------------------------------------------------------------
 .../ambari/server/controller/KerberosHelperImpl.java  |  2 +-
 .../kerberos/AbstractPrepareKerberosServerAction.java | 14 ++++++++++++++
 .../upgrades/PreconfigureKerberosAction.java          | 13 +++++++++++++
 .../AbstractPrepareKerberosServerActionTest.java      |  5 +++++
 4 files changed, 33 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0f76c7f9/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
index f8fe31a..4f14614 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
@@ -1644,7 +1644,7 @@ public class KerberosHelperImpl implements KerberosHelper {
             // Calculate the current host-specific configurations. These will be used to replace
             // variables within the Kerberos descriptor data
             Map<String, Map<String, String>> configurations = calculateConfigurations(cluster,
-                hostname.equals(ambariServerHostname) ? null : hostname,
+                hostname,
                 kerberosDescriptor,
                 false,
                 false);

http://git-wip-us.apache.org/repos/asf/ambari/blob/0f76c7f9/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
index 3db844a..7948a60 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
@@ -40,6 +40,7 @@ import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
 import org.apache.ambari.server.utils.StageUtils;
+import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -141,6 +142,19 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
           if (serviceDescriptor != null) {
             List<KerberosIdentityDescriptor> serviceIdentities = serviceDescriptor.getIdentities(true, filterContext);
 
+            if (!StringUtils.isEmpty(hostName)) {
+              // Update the configurations with the relevant hostname
+              Map<String, String> generalProperties = currentConfigurations.get("");
+              if (generalProperties == null) {
+                generalProperties = new HashMap<>();
+                currentConfigurations.put("", generalProperties);
+              }
+
+              // Add the current hostname under "host" and "hostname"
+              generalProperties.put("host", hostName);
+              generalProperties.put("hostname", hostName);
+            }
+
             // Add service-level principals (and keytabs)
             kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, serviceIdentities,
                 identityFilter, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations, excludeHeadless);

http://git-wip-us.apache.org/repos/asf/ambari/blob/0f76c7f9/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosAction.java
index d18f333..5af7c6b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosAction.java
@@ -305,6 +305,19 @@ public class PreconfigureKerberosAction extends AbstractUpgradeServerAction {
 
             KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(serviceName);
 
+            if (!StringUtils.isEmpty(hostName)) {
+              // Update the configurations with the relevant hostname
+              Map<String, String> generalProperties = currentConfigurations.get("");
+              if (generalProperties == null) {
+                generalProperties = new HashMap<>();
+                currentConfigurations.put("", generalProperties);
+              }
+
+              // Add the current hostname under "host" and "hostname"
+              generalProperties.put("host", hostName);
+              generalProperties.put("hostname", hostName);
+            }
+
             if (serviceDescriptor != null) {
               List<KerberosIdentityDescriptor> serviceIdentities = serviceDescriptor.getIdentities(true, filterContext);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0f76c7f9/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
index 8ff5ad2..5522132 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
@@ -45,6 +45,7 @@ import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
 import org.easymock.EasyMock;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -152,6 +153,10 @@ public class AbstractPrepareKerberosServerActionTest {
         false, propertiesToIgnore, false);
 
     verify(kerberosHelper);
+
+    // Ensure the host and hostname values were set in the configuration context
+    Assert.assertEquals("host1", configurations.get("").get("host"));
+    Assert.assertEquals("host1", configurations.get("").get("hostname"));
   }
 
 }


[15/50] [abbrv] ambari git commit: AMBARI-22237. Restarting Storm Supervisor from Ambari always fails while Stop and Start works fine. (Arpit Khare, swagle via swagle)

Posted by ao...@apache.org.
AMBARI-22237. Restarting Storm Supervisor from Ambari always fails while Stop and Start works fine. (Arpit Khare, swagle via swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6e1cac85
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6e1cac85
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6e1cac85

Branch: refs/heads/branch-3.0-perf
Commit: 6e1cac85e33132c93fd843e402e3702ee584b947
Parents: c8c1812
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Fri Oct 13 20:55:22 2017 -0700
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Fri Oct 13 20:55:22 2017 -0700

----------------------------------------------------------------------
 .../STORM/0.9.1/package/scripts/supervisord_service.py              | 1 +
 .../STORM/1.0.1.3.0/package/scripts/supervisord_service.py          | 1 +
 2 files changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6e1cac85/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisord_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisord_service.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisord_service.py
index 6ff9f9c..498db15 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisord_service.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisord_service.py
@@ -20,6 +20,7 @@ limitations under the License.
 
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.format import format
+from resource_management.core.exceptions import Fail
 
 def supervisord_service(component_name, action):
   Execute(format("supervisorctl {action} storm-{component_name}"),

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e1cac85/ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/package/scripts/supervisord_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/package/scripts/supervisord_service.py b/ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/package/scripts/supervisord_service.py
index 6ff9f9c..498db15 100644
--- a/ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/package/scripts/supervisord_service.py
+++ b/ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/package/scripts/supervisord_service.py
@@ -20,6 +20,7 @@ limitations under the License.
 
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.format import format
+from resource_management.core.exceptions import Fail
 
 def supervisord_service(component_name, action):
   Execute(format("supervisorctl {action} storm-{component_name}"),


[36/50] [abbrv] ambari git commit: AMBARI-22254. RU: RU failed on 'Updating configuration sqoop-atlas-application.properties' (ncole)

Posted by ao...@apache.org.
AMBARI-22254. RU: RU failed on 'Updating configuration sqoop-atlas-application.properties' (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c924ebdb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c924ebdb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c924ebdb

Branch: refs/heads/branch-3.0-perf
Commit: c924ebdb6da802ab70f95c6f974087645a600b78
Parents: 53f028e
Author: Nate Cole <nc...@hortonworks.com>
Authored: Tue Oct 17 13:38:10 2017 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Tue Oct 17 14:18:24 2017 -0400

----------------------------------------------------------------------
 .../serveraction/upgrades/ConfigureAction.java  |  5 ++++
 .../ambari/server/state/stack/UpgradePack.java  | 30 +++++++++++++++++++-
 .../state/stack/upgrade/ClusterGrouping.java    | 13 +++++++++
 .../state/stack/upgrade/ConfigureTask.java      | 16 +++++++++++
 .../internal/UpgradeResourceProviderTest.java   | 13 +++++++++
 .../state/stack/ConfigUpgradeValidityTest.java  | 10 +++++++
 6 files changed, 86 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c924ebdb/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
index a7f910f..f15a507 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
@@ -183,6 +183,11 @@ public class ConfigureAction extends AbstractUpgradeServerAction {
     String configType = commandParameters.get(ConfigureTask.PARAMETER_CONFIG_TYPE);
     String serviceName = cluster.getServiceByConfigType(configType);
 
+    // !!! we couldn't get the service based on its config type, so try the associated
+    if (StringUtils.isBlank(serviceName)) {
+      serviceName = commandParameters.get(ConfigureTask.PARAMETER_ASSOCIATED_SERVICE);
+    }
+
     RepositoryVersionEntity sourceRepoVersion = upgradeContext.getSourceRepositoryVersion(serviceName);
     RepositoryVersionEntity targetRepoVersion = upgradeContext.getTargetRepositoryVersion(serviceName);
     StackId sourceStackId = sourceRepoVersion.getStackId();

http://git-wip-us.apache.org/repos/asf/ambari/blob/c924ebdb/ambari-server/src/main/java/org/apache/ambari/server/state/stack/UpgradePack.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/UpgradePack.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/UpgradePack.java
index 256b71d..7c32f37 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/UpgradePack.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/UpgradePack.java
@@ -37,11 +37,13 @@ import javax.xml.bind.annotation.XmlValue;
 
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.state.stack.upgrade.ClusterGrouping;
+import org.apache.ambari.server.state.stack.upgrade.ConfigureTask;
 import org.apache.ambari.server.state.stack.upgrade.Direction;
 import org.apache.ambari.server.state.stack.upgrade.Grouping;
 import org.apache.ambari.server.state.stack.upgrade.ServiceCheckGrouping;
 import org.apache.ambari.server.state.stack.upgrade.Task;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
+import org.apache.commons.collections.CollectionUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -446,7 +448,7 @@ public class UpgradePack {
   private void initializeProcessingComponentMappings() {
     m_process = new LinkedHashMap<>();
 
-    if (null == processing || processing.isEmpty()) {
+    if (CollectionUtils.isEmpty(processing)) {
       return;
     }
 
@@ -574,6 +576,32 @@ public class UpgradePack {
 
         throw new RuntimeException(error);
       }
+
+      // !!! check for config tasks and mark the associated service
+      initializeTasks(service.name, preTasks);
+      initializeTasks(service.name, postTasks);
+      initializeTasks(service.name, tasks);
+      initializeTasks(service.name, preDowngradeTasks);
+      initializeTasks(service.name, postDowngradeTasks);
+    }
+
+    /**
+     * Checks for config tasks and marks the associated service.
+     * @param service
+     *          the service name
+     * @param tasks
+     *          the list of tasks to check
+     */
+    private void initializeTasks(String service, List<Task> tasks) {
+      if (CollectionUtils.isEmpty(tasks)) {
+        return;
+      }
+
+      for (Task task : tasks) {
+        if (Task.Type.CONFIGURE == task.getType()) {
+          ((ConfigureTask) task).associatedService = service;
+        }
+      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c924ebdb/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
index 65038e2..5bc3d8f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
@@ -27,6 +27,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import javax.xml.bind.Unmarshaller;
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlAttribute;
@@ -126,6 +127,18 @@ public class ClusterGrouping extends Grouping {
       return Objects.toStringHelper(this).add("id", id).add("title",
           title).omitNullValues().toString();
     }
+
+    /**
+     * If a task is found that is configure, set its associated service.  This is used
+     * if the configuration type cannot be isolated by service.
+     */
+    void afterUnmarshal(Unmarshaller unmarshaller, Object parent) {
+      if (task.getType().equals(Task.Type.CONFIGURE) && StringUtils.isNotEmpty(service)) {
+        ((ConfigureTask) task).associatedService = service;
+      }
+    }
+
+
   }
 
   public class ClusterBuilder extends StageWrapperBuilder {

http://git-wip-us.apache.org/repos/asf/ambari/blob/c924ebdb/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
index f88691d..75b5f59 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
@@ -26,6 +26,7 @@ import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlAttribute;
 import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlTransient;
 import javax.xml.bind.annotation.XmlType;
 
 import org.apache.ambari.server.serveraction.upgrades.ConfigureAction;
@@ -91,6 +92,11 @@ public class ConfigureTask extends ServerSideActionTask {
    */
   public static final String PARAMETER_INSERTIONS = "configure-task-insertions";
 
+  /**
+   * The associated service for the config task
+   */
+  public static final String PARAMETER_ASSOCIATED_SERVICE = "configure-task-associated-service";
+
   public static final String actionVerb = "Configuring";
 
   /**
@@ -114,6 +120,12 @@ public class ConfigureTask extends ServerSideActionTask {
   public boolean supportsPatch = false;
 
   /**
+   * The associated service is the service where this config task is specified
+   */
+  @XmlTransient
+  public String associatedService;
+
+  /**
    * {@inheritDoc}
    */
   @Override
@@ -236,6 +248,10 @@ public class ConfigureTask extends ServerSideActionTask {
       configParameters.put(ConfigureTask.PARAMETER_INSERTIONS, m_gson.toJson(insertions));
     }
 
+    if (StringUtils.isNotEmpty(associatedService)) {
+      configParameters.put(ConfigureTask.PARAMETER_ASSOCIATED_SERVICE, associatedService);
+    }
+
     return configParameters;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c924ebdb/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
index 20adac2..d6b1ab3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
@@ -45,6 +45,7 @@ import org.apache.ambari.server.Role;
 import org.apache.ambari.server.RoleCommand;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
+import org.apache.ambari.server.actionmanager.ExecutionCommandWrapperFactory;
 import org.apache.ambari.server.actionmanager.HostRoleCommand;
 import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.ambari.server.actionmanager.Stage;
@@ -67,12 +68,14 @@ import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.ExecutionCommandDAO;
 import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
 import org.apache.ambari.server.orm.dao.RequestDAO;
 import org.apache.ambari.server.orm.dao.StackDAO;
 import org.apache.ambari.server.orm.dao.StageDAO;
 import org.apache.ambari.server.orm.dao.UpgradeDAO;
+import org.apache.ambari.server.orm.entities.ExecutionCommandEntity;
 import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
 import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 import org.apache.ambari.server.orm.entities.RequestEntity;
@@ -100,6 +103,7 @@ import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.UpgradeContext;
 import org.apache.ambari.server.state.UpgradeHelper;
 import org.apache.ambari.server.state.UpgradeState;
+import org.apache.ambari.server.state.stack.upgrade.ConfigureTask;
 import org.apache.ambari.server.state.stack.upgrade.Direction;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 import org.apache.ambari.server.topology.TopologyManager;
@@ -1832,6 +1836,15 @@ public class UpgradeResourceProviderTest extends EasyMockSupport {
       if (StringUtils.isNotBlank(command.getCustomCommandName()) &&
           command.getCustomCommandName().equals(ConfigureAction.class.getName())) {
         foundConfigTask = true;
+
+        ExecutionCommandDAO dao = injector.getInstance(ExecutionCommandDAO.class);
+        ExecutionCommandEntity entity = dao.findByPK(command.getTaskId());
+        ExecutionCommandWrapperFactory factory = injector.getInstance(ExecutionCommandWrapperFactory.class);
+        ExecutionCommandWrapper wrapper = factory.createFromJson(new String(entity.getCommand()));
+        Map<String, String> params = wrapper.getExecutionCommand().getCommandParams();
+        assertTrue(params.containsKey(ConfigureTask.PARAMETER_ASSOCIATED_SERVICE));
+        assertEquals("ZOOKEEPER", params.get(ConfigureTask.PARAMETER_ASSOCIATED_SERVICE));
+
         break;
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c924ebdb/ambari-server/src/test/java/org/apache/ambari/server/state/stack/ConfigUpgradeValidityTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/stack/ConfigUpgradeValidityTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/stack/ConfigUpgradeValidityTest.java
index 2259e30..2930590 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/stack/ConfigUpgradeValidityTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/stack/ConfigUpgradeValidityTest.java
@@ -45,6 +45,7 @@ import org.apache.ambari.server.state.stack.upgrade.Task.Type;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.filefilter.FileFilterUtils;
 import org.apache.commons.io.filefilter.IOFileFilter;
+import org.apache.commons.lang.StringUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -128,6 +129,12 @@ public class ConfigUpgradeValidityTest {
                   ConfigureTask configureTask = (ConfigureTask) executionStage.task;
                   assertIdDefinitionExists(configureTask.id, configUpgradePack, upgradePack,
                       sourceStack);
+
+                  if (StringUtils.isNotBlank(executionStage.service)) {
+                    Assert.assertEquals(executionStage.service, configureTask.associatedService);
+                  } else {
+                    Assert.assertTrue(null == configureTask.associatedService);
+                  }
                 }
               }
             }
@@ -144,6 +151,7 @@ public class ConfigUpgradeValidityTest {
                   ConfigureTask configureTask = (ConfigureTask) preTask;
                   assertIdDefinitionExists(configureTask.id, configUpgradePack, upgradePack,
                       sourceStack);
+                  Assert.assertTrue(StringUtils.isNotBlank(configureTask.associatedService));
                 }
               }
 
@@ -153,6 +161,7 @@ public class ConfigUpgradeValidityTest {
                     ConfigureTask configureTask = (ConfigureTask) task;
                     assertIdDefinitionExists(configureTask.id, configUpgradePack, upgradePack,
                         sourceStack);
+                    Assert.assertTrue(StringUtils.isNotBlank(configureTask.associatedService));
                   }
                 }
               }
@@ -163,6 +172,7 @@ public class ConfigUpgradeValidityTest {
                     ConfigureTask configureTask = (ConfigureTask) postTask;
                     assertIdDefinitionExists(configureTask.id, configUpgradePack, upgradePack,
                         sourceStack);
+                    Assert.assertTrue(StringUtils.isNotBlank(configureTask.associatedService));
                   }
                 }
               }


[05/50] [abbrv] ambari git commit: AMBARI-22212. Alter logic for storing interpreter.json (Prabhjyot Singh via Venkata Sairam)

Posted by ao...@apache.org.
AMBARI-22212. Alter logic for storing interpreter.json (Prabhjyot Singh via Venkata Sairam)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/be605cbf
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/be605cbf
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/be605cbf

Branch: refs/heads/branch-3.0-perf
Commit: be605cbfdc977ff927df89fc772b8fbb1f589fce
Parents: 3c3b1b8
Author: Venkata Sairam <ve...@gmail.com>
Authored: Thu Oct 12 17:17:07 2017 +0530
Committer: Venkata Sairam <ve...@gmail.com>
Committed: Thu Oct 12 17:17:07 2017 +0530

----------------------------------------------------------------------
 .../ZEPPELIN/0.7.0/package/scripts/master.py    | 69 +++++++++-----------
 .../ZEPPELIN/0.7.0/package/scripts/params.py    |  5 ++
 .../stacks/2.6/ZEPPELIN/test_zeppelin_070.py    | 52 +++------------
 3 files changed, 44 insertions(+), 82 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/be605cbf/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
index eaa2cb9..a8b1b32 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
@@ -242,6 +242,17 @@ class Master(Script):
     if zeppelin_spark_dependencies and os.path.exists(zeppelin_spark_dependencies[0]):
       self.create_zeppelin_dir(params)
 
+    if params.conf_stored_in_hdfs:
+      if not self.is_path_exists_in_HDFS(self.get_zeppelin_conf_FS_directory(params), params.zeppelin_user):
+        # hdfs dfs -mkdir {zeppelin's conf directory}
+        params.HdfsResource(self.get_zeppelin_conf_FS_directory(params),
+                            type="directory",
+                            action="create_on_execute",
+                            owner=params.zeppelin_user,
+                            recursive_chown=True,
+                            recursive_chmod=True
+                            )
+
     # if first_setup:
     if not glob.glob(params.conf_dir + "/interpreter.json") and \
       not os.path.exists(params.conf_dir + "/interpreter.json"):
@@ -323,21 +334,16 @@ class Master(Script):
     import json
 
     interpreter_config = os.path.join(params.conf_dir, "interpreter.json")
-    if 'zeppelin.notebook.storage' in params.config['configurations']['zeppelin-config'] \
-      and params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo':
-
-      if 'zeppelin.config.fs.dir' in params.config['configurations']['zeppelin-config']:
-        zeppelin_conf_fs = self.get_zeppelin_conf_FS(params)
-
-        if self.is_path_exists_in_HDFS(zeppelin_conf_fs, params.zeppelin_user):
-          # copy from hdfs to /etc/zeppelin/conf/interpreter.json
-          kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths',None))
-          kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};")
-          shell.call(format("rm {interpreter_config};"
-                            "{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -get {zeppelin_conf_fs} {interpreter_config}"),
-                     user=params.zeppelin_user)
-        else:
-          Logger.info(format("{zeppelin_conf_fs} does not exist. Skipping upload of DFS."))
+    if params.conf_stored_in_hdfs:
+      zeppelin_conf_fs = self.get_zeppelin_conf_FS(params)
+
+      if self.is_path_exists_in_HDFS(zeppelin_conf_fs, params.zeppelin_user):
+        # copy from hdfs to /etc/zeppelin/conf/interpreter.json
+        kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths',None))
+        kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};")
+        shell.call(format("rm {interpreter_config};"
+                          "{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -get {zeppelin_conf_fs} {interpreter_config}"),
+                   user=params.zeppelin_user)
 
     config_content = sudo.read_file(interpreter_config)
     config_data = json.loads(config_content)
@@ -353,28 +359,15 @@ class Master(Script):
          owner=params.zeppelin_user,
          content=json.dumps(config_data, indent=2))
 
-    if 'zeppelin.notebook.storage' in params.config['configurations']['zeppelin-config'] \
-      and params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo':
-
-      if 'zeppelin.config.fs.dir' in params.config['configurations']['zeppelin-config']:
-        if not self.is_path_exists_in_HDFS(self.get_zeppelin_conf_FS_directory(params), params.zeppelin_user):
-          # hdfs dfs -mkdir {zeppelin's conf directory}
-          params.HdfsResource(self.get_zeppelin_conf_FS_directory(params),
-                              type="directory",
-                              action="create_on_execute",
-                              owner=params.zeppelin_user,
-                              recursive_chown=True,
-                              recursive_chmod=True
-                          )
-
-        params.HdfsResource(self.get_zeppelin_conf_FS(params),
-                            type="file",
-                            action="create_on_execute",
-                            source=interpreter_config,
-                            owner=params.zeppelin_user,
-                            recursive_chown=True,
-                            recursive_chmod=True,
-                            replace_existing_files=True)
+    if params.conf_stored_in_hdfs:
+      params.HdfsResource(self.get_zeppelin_conf_FS(params),
+                          type="file",
+                          action="create_on_execute",
+                          source=interpreter_config,
+                          owner=params.zeppelin_user,
+                          recursive_chown=True,
+                          recursive_chmod=True,
+                          replace_existing_files=True)
 
   def update_kerberos_properties(self):
     import params
@@ -451,7 +444,7 @@ class Master(Script):
     hive_interactive_properties_key = 'hive_interactive'
     for setting_key in interpreter_settings.keys():
       interpreter = interpreter_settings[setting_key]
-      if interpreter['group'] == 'jdbc':
+      if interpreter['group'] == 'jdbc' and interpreter['name'] == 'jdbc':
         interpreter['dependencies'] = []
 
         if not params.hive_server_host and params.hive_server_interactive_hosts:

http://git-wip-us.apache.org/repos/asf/ambari/blob/be605cbf/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
index b7d3df0..e69037c 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
@@ -104,6 +104,11 @@ conf_dir = "/etc/zeppelin/conf"
 external_dependency_conf = "/etc/zeppelin/conf/external-dependency-conf"
 notebook_dir = os.path.join(*[install_dir, zeppelin_dirname, 'notebook'])
 
+conf_stored_in_hdfs = False
+if 'zeppelin.config.fs.dir' in config['configurations']['zeppelin-config'] and \
+  not config['configurations']['zeppelin-config']['zeppelin.config.fs.dir'].startswith('file://'):
+  conf_stored_in_hdfs = True
+
 # zeppelin-env.sh
 zeppelin_env_content = config['configurations']['zeppelin-env']['zeppelin_env_content']
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/be605cbf/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
index 3adb94c..e5d0240 100644
--- a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
+++ b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
@@ -341,12 +341,6 @@ class TestZeppelin070(RMFTestCase):
                               security_enabled=False,
                               )
 
-    self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
-                          content=interpreter_json_generated.template_after_base,
-                          owner='zeppelin',
-                          group='zeppelin',
-                          )
-
     self.assertResourceCalled('HdfsResource',
                               '/user/zeppelin/hdfs:///user/zeppelin/conf',
                               security_enabled=False,
@@ -359,13 +353,20 @@ class TestZeppelin070(RMFTestCase):
                               principal_name=UnknownConfigurationMock(),
                               recursive_chown=True,
                               recursive_chmod=True,
-                              owner='zeppelin',
                               user='hdfs',
+                              owner='zeppelin',
                               hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
                               type='directory',
                               action=['create_on_execute'],
                               )
 
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
+                          content=interpreter_json_generated.template_after_base,
+                          owner='zeppelin',
+                          group='zeppelin',
+                          )
+
+
     self.assertResourceCalled('HdfsResource',
                               '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
                               security_enabled=False,
@@ -394,25 +395,6 @@ class TestZeppelin070(RMFTestCase):
                               )
 
     self.assertResourceCalled('HdfsResource',
-                              '/user/zeppelin/hdfs:///user/zeppelin/conf',
-                              security_enabled=False,
-                              hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
-                              keytab=UnknownConfigurationMock(),
-                              default_fs='hdfs://c6401.ambari.apache.org:8020',
-                              recursive_chown=True,
-                              recursive_chmod=True,
-                              hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-                              hdfs_site={u'a': u'b'},
-                              kinit_path_local='/usr/bin/kinit',
-                              principal_name=UnknownConfigurationMock(),
-                              user='hdfs',
-                              owner='zeppelin',
-                              hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
-                              type='directory',
-                              action=['create_on_execute'],
-                              )
-
-    self.assertResourceCalled('HdfsResource',
                               '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
                               security_enabled=False,
                               hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
@@ -439,24 +421,6 @@ class TestZeppelin070(RMFTestCase):
                               group='zeppelin',
                               )
 
-    self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf',
-                              security_enabled = False,
-                              hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
-                              keytab = UnknownConfigurationMock(),
-                              default_fs = 'hdfs://c6401.ambari.apache.org:8020',
-                              hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-                              hdfs_site = {u'a': u'b'},
-                              kinit_path_local = '/usr/bin/kinit',
-                              principal_name = UnknownConfigurationMock(),
-                              recursive_chown=True,
-                              recursive_chmod=True,
-                              user = 'hdfs',
-                              owner = 'zeppelin',
-                              hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
-                              type = 'directory',
-                              action = ['create_on_execute'],
-                              )
-
     self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
         security_enabled = False,
         hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',


[04/50] [abbrv] ambari git commit: AMBARI-22214. Livy protocol to be set to https in Zeppelin's interpreter setting (Venkata Sairam)

Posted by ao...@apache.org.
AMBARI-22214. Livy protocol to be set to https in Zeppelin's interpreter setting (Venkata Sairam)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3c3b1b81
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3c3b1b81
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3c3b1b81

Branch: refs/heads/branch-3.0-perf
Commit: 3c3b1b810ee05490fe2fe370de6132ba31bf8b72
Parents: b86f53f
Author: Venkata Sairam <ve...@gmail.com>
Authored: Thu Oct 12 17:05:12 2017 +0530
Committer: Venkata Sairam <ve...@gmail.com>
Committed: Thu Oct 12 17:05:12 2017 +0530

----------------------------------------------------------------------
 .../common-services/ZEPPELIN/0.7.0/package/scripts/master.py   | 6 ++++--
 .../common-services/ZEPPELIN/0.7.0/package/scripts/params.py   | 6 ++++++
 2 files changed, 10 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3c3b1b81/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
index 09944bd..eaa2cb9 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
@@ -530,14 +530,16 @@ class Master(Script):
 
       elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy':
         if params.livy_livyserver_host:
-          interpreter['properties']['zeppelin.livy.url'] = "http://" + params.livy_livyserver_host + \
+          interpreter['properties']['zeppelin.livy.url'] = params.livy_livyserver_protocol + \
+                                                           "://" + params.livy_livyserver_host + \
                                                            ":" + params.livy_livyserver_port
         else:
           del interpreter_settings[setting_key]
 
       elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy2':
         if params.livy2_livyserver_host:
-          interpreter['properties']['zeppelin.livy.url'] = "http://" + params.livy2_livyserver_host + \
+          interpreter['properties']['zeppelin.livy.url'] = params.livy2_livyserver_protocol + \
+                                                           "://" + params.livy2_livyserver_host + \
                                                            ":" + params.livy2_livyserver_port
         else:
           del interpreter_settings[setting_key]

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c3b1b81/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
index 3242f26..b7d3df0 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
@@ -219,17 +219,23 @@ livy2_hosts = default("/clusterHostInfo/livy2_server_hosts", [])
 
 livy_livyserver_host = None
 livy_livyserver_port = None
+livy_livyserver_protocol = 'http'
 livy2_livyserver_host = None
 livy2_livyserver_port = None
+livy2_livyserver_protocol = 'http'
 if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY, stack_version_formatted) and \
     len(livy_hosts) > 0:
   livy_livyserver_host = str(livy_hosts[0])
   livy_livyserver_port = config['configurations']['livy-conf']['livy.server.port']
+  if 'livy.keystore' in config['configurations']['livy-conf']:
+    livy_livyserver_protocol = 'https'
 
 if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY2, stack_version_formatted) and \
     len(livy2_hosts) > 0:
   livy2_livyserver_host = str(livy2_hosts[0])
   livy2_livyserver_port = config['configurations']['livy2-conf']['livy.server.port']
+  if 'livy.keystore' in config['configurations']['livy2-conf']:
+    livy2_livyserver_protocol = 'https'
 
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 security_enabled = config['configurations']['cluster-env']['security_enabled']


[27/50] [abbrv] ambari git commit: AMBARI-22232 - Need to add a new property to support proxy users property for Atlas service (Vishal Suvagia via jonathanhurley)

Posted by ao...@apache.org.
AMBARI-22232 - Need to add a new property to support proxy users property for Atlas service (Vishal Suvagia via jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/523f505d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/523f505d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/523f505d

Branch: refs/heads/branch-3.0-perf
Commit: 523f505dca99d1a8b9dd07ffabc1bad110910566
Parents: 5f86f15
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon Oct 16 12:56:57 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon Oct 16 12:58:02 2017 -0400

----------------------------------------------------------------------
 .../AtlasProxyUserConfigCalculation.java        | 11 ++++------
 .../AtlasProxyUserConfigCalculationTest.java    | 22 +++++++++++---------
 2 files changed, 16 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/523f505d/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculation.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculation.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculation.java
index 40d64b5..1a99299 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculation.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculation.java
@@ -17,19 +17,16 @@
  */
 package org.apache.ambari.server.serveraction.upgrades;
 
+import java.text.MessageFormat;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.ambari.server.agent.CommandReport;
-import org.apache.ambari.server.serveraction.AbstractServerAction;
 import org.apache.ambari.server.state.Cluster;
-import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 
-import javax.inject.Inject;
-import java.text.MessageFormat;
-import java.util.Map;
-import java.util.concurrent.ConcurrentMap;
-
 public class AtlasProxyUserConfigCalculation extends AbstractUpgradeServerAction {
 
   private static final String ATLAS_APPLICATION_PROPERTIES_CONFIG_TYPE = "application-properties";

http://git-wip-us.apache.org/repos/asf/ambari/blob/523f505d/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculationTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculationTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculationTest.java
index 33ec7f3..7cb6255 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculationTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/AtlasProxyUserConfigCalculationTest.java
@@ -17,7 +17,16 @@
  */
 package org.apache.ambari.server.serveraction.upgrades;
 
-import com.google.inject.Injector;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.Map;
+
 import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
 import org.apache.ambari.server.actionmanager.HostRoleCommand;
 import org.apache.ambari.server.agent.CommandReport;
@@ -25,20 +34,13 @@ import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
-import org.easymock.EasyMock;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import java.lang.reflect.Field;
-import java.util.HashMap;
-import java.util.Map;
 
-import static org.easymock.EasyMock.*;
-import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+
+import com.google.inject.Injector;
 
 public class AtlasProxyUserConfigCalculationTest {
 


[18/50] [abbrv] ambari git commit: AMBARI-22203. ZKFC start failed due to hadoop-hdfs-zkfc is not supported (partial revert) (ncole)

Posted by ao...@apache.org.
AMBARI-22203. ZKFC start failed due to hadoop-hdfs-zkfc is not supported (partial revert) (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8c017c1f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8c017c1f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8c017c1f

Branch: refs/heads/branch-3.0-perf
Commit: 8c017c1fdebdbd0261749df0434dc86744dcf986
Parents: 3290677
Author: Nate Cole <nc...@hortonworks.com>
Authored: Sat Oct 14 14:53:14 2017 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Sat Oct 14 14:53:14 2017 -0400

----------------------------------------------------------------------
 .../main/resources/stacks/HDP/2.0.6/properties/stack_packages.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8c017c1f/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
index c217ea8..b8655d7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
@@ -368,7 +368,7 @@
             "hadoop-hdfs-zkfc"
           ],
           "STANDARD": [
-            "hadoop-client"
+            "hadoop-hdfs-zkfc"
           ]
         }
       },


[40/50] [abbrv] ambari git commit: AMBARI-22263. Disable Actions menu while Wizard in progress instead of hide (alexantonenko)

Posted by ao...@apache.org.
AMBARI-22263. Disable Actions menu while Wizard in progress instead of hide (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/84342f6a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/84342f6a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/84342f6a

Branch: refs/heads/branch-3.0-perf
Commit: 84342f6a851daa12ef612a96e0076ea893fd12f2
Parents: ee509e0
Author: Alex Antonenko <aa...@hortonworks.com>
Authored: Wed Oct 18 18:26:08 2017 +0300
Committer: Alex Antonenko <aa...@hortonworks.com>
Committed: Wed Oct 18 18:26:08 2017 +0300

----------------------------------------------------------------------
 ambari-web/app/app.js                           |  24 ++-
 .../models/configs/service_config_version.js    |   9 +-
 ambari-web/app/models/host_stack_version.js     |   4 +-
 .../common/configs/config_history_flow.hbs      |   8 +-
 .../common/configs/service_version_box.hbs      |   8 +-
 .../templates/common/host_progress_popup.hbs    |   6 +-
 .../modal_popups/widget_browser_popup.hbs       |   6 +-
 ambari-web/app/templates/experimental.hbs       |   6 +-
 .../main/admin/stack_upgrade/versions.hbs       |   6 +-
 ambari-web/app/templates/main/host.hbs          |   4 +-
 .../templates/main/host/bulk_operation_menu.hbs |   4 +-
 .../main/host/details/host_component.hbs        | 176 ++++++++++---------
 .../app/templates/main/host/stack_versions.hbs  |   4 +-
 ambari-web/app/templates/main/host/summary.hbs  | 120 +++++++------
 ambari-web/app/utils/helper.js                  |  23 +++
 ambari-web/app/views/main/service/item.js       |   2 +-
 ambari-web/test/views/main/service/item_test.js |   6 +-
 17 files changed, 224 insertions(+), 192 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/app/app.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/app.js b/ambari-web/app/app.js
index 2c638e4..6432849 100644
--- a/ambari-web/app/app.js
+++ b/ambari-web/app/app.js
@@ -131,27 +131,18 @@ module.exports = Em.Application.create({
   }.property('upgradeIsRunning', 'upgradeAborted', 'router.wizardWatcherController.isNonWizardUser', 'upgradeSuspended'),
 
   /**
-   * Options:
-   *  - ignoreWizard: ignore when some wizard is running by another user (default `false`)
-   *
    * @param {string} authRoles
-   * @param {object} options
    * @returns {boolean}
    */
-  isAuthorized: function (authRoles, options) {
-    options = $.extend({ignoreWizard: false}, options);
+  havePermissions: function (authRoles) {
     var result = false;
     authRoles = $.map(authRoles.split(","), $.trim);
 
     // When Upgrade running(not suspended) only operations related to upgrade should be allowed
     if ((!this.get('upgradeSuspended') && !authRoles.contains('CLUSTER.UPGRADE_DOWNGRADE_STACK')) &&
-        !App.get('supports.opsDuringRollingUpgrade') &&
-        !['NOT_REQUIRED', 'COMPLETED'].contains(this.get('upgradeState')) ||
-        !App.auth){
-      return false;
-    }
-
-    if (!options.ignoreWizard && App.router.get('wizardWatcherController.isNonWizardUser')) {
+      !App.get('supports.opsDuringRollingUpgrade') &&
+      !['NOT_REQUIRED', 'COMPLETED'].contains(this.get('upgradeState')) ||
+      !App.auth){
       return false;
     }
 
@@ -161,6 +152,13 @@ module.exports = Em.Application.create({
 
     return result;
   },
+  /**
+   * @param {string} authRoles
+   * @returns {boolean}
+   */
+  isAuthorized: function (authRoles) {
+    return this.havePermissions(authRoles) && !App.router.get('wizardWatcherController.isNonWizardUser');
+  },
 
   isStackServicesLoaded: false,
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/app/models/configs/service_config_version.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/configs/service_config_version.js b/ambari-web/app/models/configs/service_config_version.js
index 4120681..7645377 100644
--- a/ambari-web/app/models/configs/service_config_version.js
+++ b/ambari-web/app/models/configs/service_config_version.js
@@ -141,12 +141,13 @@ App.ServiceConfigVersion = DS.Model.extend({
    * {{view: (string|boolean), compare: (string|boolean), revert: (string|boolean)}} disabledActionAttr
    */
   disabledActionAttr: function () {
+    var isNonWizardUser = App.router.get('wizardWatcherController.isNonWizardUser');
     return {
-      view: (this.get('isDisplayed')) ? 'disabled' : false,
-      compare: (this.get('isDisabled') || this.get('isDisplayed')) ? 'disabled' : false,
-      revert: (this.get('isDisabled') || this.get('isCurrent')) ? 'disabled' : false
+      view: (this.get('isDisplayed') || isNonWizardUser) ? 'disabled' : false,
+      compare: (this.get('isDisabled') || isNonWizardUser || this.get('isDisplayed')) ? 'disabled' : false,
+      revert: (this.get('isDisabled') || isNonWizardUser || this.get('isCurrent')) ? 'disabled' : false
     }
-  }.property('isDisplayed', 'isCurrent', 'isDisabled')
+  }.property('isDisplayed', 'isCurrent', 'isDisabled', 'App.router.wizardWatcherController.isNonWizardUser')
 });
 
 App.ServiceConfigVersion.FIXTURES = [];

http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/app/models/host_stack_version.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/host_stack_version.js b/ambari-web/app/models/host_stack_version.js
index 7e6d0a0..15e62f4 100644
--- a/ambari-web/app/models/host_stack_version.js
+++ b/ambari-web/app/models/host_stack_version.js
@@ -61,7 +61,9 @@ App.HostStackVersion = DS.Model.extend({
    */
   installEnabled: Em.computed.existsIn('status', ['OUT_OF_SYNC', 'INSTALL_FAILED']),
 
-  installDisabled: Em.computed.not('installEnabled')
+  installDisabled: function(){
+    return !this.get('installEnabled') || App.router.get('wizardWatcherController.isNonWizardUser');
+  }.property('installEnabled', 'App.routerwizardWatcherController.isNonWizardUser')
 });
 
 App.HostStackVersion.FIXTURES = [];

http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/app/templates/common/configs/config_history_flow.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/configs/config_history_flow.hbs b/ambari-web/app/templates/common/configs/config_history_flow.hbs
index f063406..99cd140 100644
--- a/ambari-web/app/templates/common/configs/config_history_flow.hbs
+++ b/ambari-web/app/templates/common/configs/config_history_flow.hbs
@@ -70,12 +70,12 @@
         </div>
         <div class="version-operations-buttons">
           <button {{bindAttr disabled="view.parentView.hoveredServiceVersion.disabledActionAttr.view" class=":btn :btn-default view.parentView.hoveredServiceVersion.isDisplayed:not-allowed-cursor" title="view.parentView.hoveredServiceVersion.disabledActionMessages.view"}} {{action doAction undefined view.parentView.actionTypes.SWITCH target="view.parentView"}}><i class="glyphicon glyphicon-search"></i>&nbsp;{{t common.view}}</button>
-          {{#isAuthorized "SERVICE.COMPARE_CONFIGS"}}
+          {{#havePermissions "SERVICE.COMPARE_CONFIGS"}}
             <button {{bindAttr disabled="view.parentView.hoveredServiceVersion.disabledActionAttr.compare" class=":btn :btn-default view.parentView.hoveredServiceVersion.isDisplayed:not-allowed-cursor" title="view.parentView.hoveredServiceVersion.disabledActionMessages.compare"}} {{action doAction undefined view.parentView.actionTypes.COMPARE target="view.parentView"}}><i class="glyphicon glyphicon-copy"></i>&nbsp;{{t common.compare}}</button>
-          {{/isAuthorized}}
-          {{#isAuthorized "SERVICE.MODIFY_CONFIGS"}}
+          {{/havePermissions}}
+          {{#havePermissions "SERVICE.MODIFY_CONFIGS"}}
             <button {{bindAttr disabled="view.parentView.hoveredServiceVersion.disabledActionAttr.revert" class=":btn :btn-default view.parentView.hoveredServiceVersion.isCurrent:not-allowed-cursor view.parentView.hoveredServiceVersion.isCompatible::hidden" title="view.parentView.hoveredServiceVersion.disabledActionMessages.revert"}} {{action doAction undefined view.parentView.actionTypes.REVERT target="view.parentView"}}>{{t dashboard.configHistory.info-bar.revert.button}}</button>
-          {{/isAuthorized}}
+          {{/havePermissions}}
         </div>
       {{/if}}
     {{/view}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/app/templates/common/configs/service_version_box.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/configs/service_version_box.hbs b/ambari-web/app/templates/common/configs/service_version_box.hbs
index aaa4520..738b83d 100644
--- a/ambari-web/app/templates/common/configs/service_version_box.hbs
+++ b/ambari-web/app/templates/common/configs/service_version_box.hbs
@@ -46,16 +46,16 @@
         <button {{bindAttr disabled="view.disabledActionAttr.view" class=":btn :btn-default view.serviceVersion.isDisplayed:not-allowed-cursor" title="view.disabledActionMessages.view"}} {{action doAction view.serviceVersion view.actionTypes.SWITCH target="view.parentView"}}>
           <i class="glyphicon glyphicon-search"></i>&nbsp;{{t common.view}}
         </button>
-        {{#isAuthorized "SERVICE.COMPARE_CONFIGS"}}
+        {{#havePermissions "SERVICE.COMPARE_CONFIGS"}}
           <button {{bindAttr disabled="view.disabledActionAttr.compare" class=":btn :btn-default view.serviceVersion.isDisplayed:not-allowed-cursor" title="view.disabledActionMessages.compare"}} {{action doAction view.serviceVersion view.actionTypes.COMPARE target="view.parentView"}}>
             <i class="glyphicon glyphicon-copy"></i>&nbsp;{{t common.compare}}
           </button>
-        {{/isAuthorized}}
-        {{#isAuthorized "SERVICE.MODIFY_CONFIGS"}}
+        {{/havePermissions}}
+        {{#havePermissions "SERVICE.MODIFY_CONFIGS"}}
           <button {{bindAttr disabled="view.disabledActionAttr.revert" class=":btn :btn-default view.serviceVersion.isCurrent:not-allowed-cursor view.serviceVersion.isCompatible::hidden" title="view.disabledActionMessages.revert"}} {{action doAction view.serviceVersion view.actionTypes.REVERT target="view.parentView"}}>
             {{t dashboard.configHistory.info-bar.revert.button}}
           </button>
-        {{/isAuthorized}}
+        {{/havePermissions}}
       </div>
     </div>
   </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/app/templates/common/host_progress_popup.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/host_progress_popup.hbs b/ambari-web/app/templates/common/host_progress_popup.hbs
index f5b1c73..31358e4 100644
--- a/ambari-web/app/templates/common/host_progress_popup.hbs
+++ b/ambari-web/app/templates/common/host_progress_popup.hbs
@@ -256,11 +256,11 @@
           <div class="col-sm-12">
             {{#if App.supports.logSearch}}
               {{#if view.isLogSearchInstalled}}
-                {{#isAuthorized "SERVICE.VIEW_OPERATIONAL_LOGS"}}
-                  <button type="button" class="btn btn-link pull-right" {{action navigateToHostLogs target="view"}} {{bindAttr class="view.isLogsLinkVisible::hidden"}}>
+                {{#havePermissions "SERVICE.VIEW_OPERATIONAL_LOGS"}}
+                  <button type="button" class="btn btn-link pull-right" {{action navigateToHostLogs target="view"}} {{bindAttr class="view.isLogsLinkVisible::hidden" disabled="App.router.wizardWatcherController.isNonWizardUser"}}>
                     <i class="glyphicon glyphicon-file"></i>&nbsp;{{t common.host}} {{t common.logs}}
                   </button>
-                {{/isAuthorized}}
+                {{/havePermissions}}
               {{/if}}
             {{/if}}
             <button type="button" class="btn btn-link pull-right" {{translateAttr title="common.openNewWindow"}} {{action openTaskLogInDialog}}>

http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/app/templates/common/modal_popups/widget_browser_popup.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/modal_popups/widget_browser_popup.hbs b/ambari-web/app/templates/common/modal_popups/widget_browser_popup.hbs
index 2cc40c2..9910d07 100644
--- a/ambari-web/app/templates/common/modal_popups/widget_browser_popup.hbs
+++ b/ambari-web/app/templates/common/modal_popups/widget_browser_popup.hbs
@@ -26,13 +26,13 @@
           <li {{bindAttr class="service.isActive:active"}}>
             <a {{action "filterByService" service.value target="view"}}>{{service.label}}</a></li>
         {{/each}}
-        {{#isAuthorized "SERVICE.MODIFY_CONFIGS"}}
+        {{#havePermissions "SERVICE.MODIFY_CONFIGS"}}
           <li class="pull-right">
-            <button type="button" class="btn btn-primary" {{action "createWidget" target="view"}} >
+            <button type="button" class="btn btn-primary" {{bindAttr disabled="App.router.wizardWatcherController.isNonWizardUser"}} {{action "createWidget" target="view"}} >
               <i class="glyphicon glyphicon-plus"></i> &nbsp; {{t dashboard.widgets.create}}
             </button>
           </li>
-        {{/isAuthorized}}
+        {{/havePermissions}}
       </ul>
 
       <!--Widgets table two column-->

http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/app/templates/experimental.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/experimental.hbs b/ambari-web/app/templates/experimental.hbs
index c3d9eba..80d9e3d 100644
--- a/ambari-web/app/templates/experimental.hbs
+++ b/ambari-web/app/templates/experimental.hbs
@@ -56,14 +56,14 @@
   </div>
     <hr>
     <div class="control-group" style="margin-bottom: 100px;">
-      {{#isAuthorized "CLUSTER.MANAGE_USER_PERSISTED_DATA"}}
+      {{#havePermissions "CLUSTER.MANAGE_USER_PERSISTED_DATA"}}
         <div class="pull-left" style="margin-bottom: 100px;">
-          <button class="btn btn-danger" {{action doResetUIStates target="controller"}}>{{t reset.ui.states}}</button>
+          <button class="btn btn-danger" {{bindAttr disabled="App.router.wizardWatcherController.isNonWizardUser"}} {{action doResetUIStates target="controller"}}>{{t reset.ui.states}}</button>
           <p class="pull-right" style="margin-left: 10px; padding-top: 5px;">
               Reset UI state locally and on the server
           </p>
         </div>
-      {{/isAuthorized}}
+      {{/havePermissions}}
     </div>
   {{else}}
     <div class="alert alert-danger">

http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/app/templates/main/admin/stack_upgrade/versions.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/stack_upgrade/versions.hbs b/ambari-web/app/templates/main/admin/stack_upgrade/versions.hbs
index edcb37c..47f9a2d 100644
--- a/ambari-web/app/templates/main/admin/stack_upgrade/versions.hbs
+++ b/ambari-web/app/templates/main/admin/stack_upgrade/versions.hbs
@@ -17,11 +17,11 @@
 }}
 
 <div id="versions-filter-section" class="btn-toolbar">
-  {{#isAuthorized "AMBARI.MANAGE_STACK_VERSIONS"}}
-    <button class="btn btn-primary" {{action goToVersions target="view"}} id="manage-versions-link">
+  {{#havePermissions "AMBARI.MANAGE_STACK_VERSIONS"}}
+    <button class="btn btn-primary" {{action goToVersions target="view"}}  {{bindAttr disabled="App.router.wizardWatcherController.isNonWizardUser"}} id="manage-versions-link">
         <i class="icon-external-link"></i>&nbsp;{{t admin.stackVersions.manageVersions}}
     </button>
-  {{/isAuthorized}}
+  {{/havePermissions}}
   <div class="btn-group display-inline-block">
     <button class="btn btn-default dropdown-toggle" data-toggle="dropdown" href="#">
       <span class="filters-label">{{t common.filter}}: </span>

http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/app/templates/main/host.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/host.hbs b/ambari-web/app/templates/main/host.hbs
index b373c40..07a9e7f 100644
--- a/ambari-web/app/templates/main/host.hbs
+++ b/ambari-web/app/templates/main/host.hbs
@@ -22,9 +22,9 @@
     <h2 class="table-title col-sm-1">{{t common.hosts}}</h2>
     <div class="table-controls row col-sm-10 pull-right">
       <div class="col-sm-12">
-        {{#isAuthorized "HOST.ADD_DELETE_COMPONENTS, HOST.TOGGLE_MAINTENANCE, HOST.ADD_DELETE_HOSTS"}}
+        {{#havePermissions "HOST.ADD_DELETE_COMPONENTS, HOST.TOGGLE_MAINTENANCE, HOST.ADD_DELETE_HOSTS"}}
           {{view App.HostTableMenuView}}
-        {{/isAuthorized}}
+        {{/havePermissions}}
         <div class="col-sm-10 pull-right">
           {{outlet}}
         </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/app/templates/main/host/bulk_operation_menu.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/host/bulk_operation_menu.hbs b/ambari-web/app/templates/main/host/bulk_operation_menu.hbs
index 89551ca..d7bf8bd 100644
--- a/ambari-web/app/templates/main/host/bulk_operation_menu.hbs
+++ b/ambari-web/app/templates/main/host/bulk_operation_menu.hbs
@@ -16,7 +16,8 @@
 * limitations under the License.
 }}
 
-<button class="btn btn-success dropdown-toggle" data-toggle="dropdown" href="#">{{t common.actions}} <span class="caret"></span></button>
+<button class="btn btn-success dropdown-toggle" data-toggle="dropdown" href="#" {{bindAttr disabled="App.router.wizardWatcherController.isNonWizardUser"}}>{{t common.actions}} <span class="caret"></span></button>
+{{#unless App.router.wizardWatcherController.isNonWizardUser}}
 <ul class="dropdown-menu">
   {{#isAuthorized "HOST.ADD_DELETE_HOSTS"}}
     <li><a href="#" {{action addHost}}><i class="glyphicon glyphicon-plus glyphicon-white"></i> {{t hosts.host.add}}</a></li>
@@ -151,3 +152,4 @@
     </div>
   </li>
 </ul>
+{{/unless}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/app/templates/main/host/details/host_component.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/host/details/host_component.hbs b/ambari-web/app/templates/main/host/details/host_component.hbs
index d94dd3b..28d2acf 100644
--- a/ambari-web/app/templates/main/host/details/host_component.hbs
+++ b/ambari-web/app/templates/main/host/details/host_component.hbs
@@ -54,112 +54,114 @@
   </span>
 </div>
 <div class="col-md-5 col-lg-4">
-  {{#isAuthorized "SERVICE.DECOMMISSION_RECOMMISSION"}}
+  {{#havePermissions "SERVICE.DECOMMISSION_RECOMMISSION"}}
     <div class="dropdown">
-      <button {{ bindAttr class="view.disabled :btn :btn-default :btn-block :dropdown-toggle"}} data-toggle="dropdown">
+      <button {{ bindAttr class="view.disabled :btn :btn-default :btn-block :dropdown-toggle" disabled="App.router.wizardWatcherController.isNonWizardUser"}} data-toggle="dropdown">
         {{view.componentTextStatus}}
         <span class="caret pull-right button-caret-margin"></span>
       </button>
-      <ul class="dropdown-menu">
-        {{#if view.isComponentDecommissionAvailable}}
-          {{view view.decommissionView}}
-        {{/if}}
-        {{#if view.isComponentRecommissionAvailable}}
-          {{view view.decommissionView}}
-        {{/if}}
-        {{#if view.isRestartableComponent}}
-          <li {{bindAttr class="view.isRestartComponentDisabled:hidden"}}>
-            <a href="javascript:void(null)" data-toggle="modal" {{action "restartComponent" view.content target="controller"}}>
-              {{t common.restart}}
-            </a>
-          </li>
-        {{/if}}
-        {{#unless view.isInstalling}}
-          {{#isAuthorized "SERVICE.START_STOP"}}
-            {{#if view.isStart}}
-              <li {{bindAttr class="view.isComponentDecommissioning:hidden view.noActionAvailable"}}>
-                <a href="javascript:void(null)" data-toggle="modal" {{action "stopComponent" view.content target="controller"}}>
-                  {{t common.stop}}
+      {{#unless App.router.wizardWatcherController.isNonWizardUser}}
+        <ul class="dropdown-menu">
+          {{#if view.isComponentDecommissionAvailable}}
+            {{view view.decommissionView}}
+          {{/if}}
+          {{#if view.isComponentRecommissionAvailable}}
+            {{view view.decommissionView}}
+          {{/if}}
+          {{#if view.isRestartableComponent}}
+            <li {{bindAttr class="view.isRestartComponentDisabled:hidden"}}>
+              <a href="javascript:void(null)" data-toggle="modal" {{action "restartComponent" view.content target="controller"}}>
+                {{t common.restart}}
+              </a>
+            </li>
+          {{/if}}
+          {{#unless view.isInstalling}}
+            {{#isAuthorized "SERVICE.START_STOP"}}
+              {{#if view.isStart}}
+                <li {{bindAttr class="view.isComponentDecommissioning:hidden view.noActionAvailable"}}>
+                  <a href="javascript:void(null)" data-toggle="modal" {{action "stopComponent" view.content target="controller"}}>
+                    {{t common.stop}}
+                  </a>
+                </li>
+              {{/if}}
+              {{#unless view.isStart}}
+                {{#unless view.isInit}}
+                  <li {{bindAttr class="view.isUpgradeFailed:hidden view.isInstallFailed:hidden view.isDecommissioning:hidden view.noActionAvailable"}}>
+                    <a href="javascript:void(null)" data-toggle="modal" {{action "startComponent" view.content target="controller"}}>
+                      {{t common.start}}
+                    </a>
+                  </li>
+                {{/unless}}
+              {{/unless}}
+            {{/isAuthorized}}
+            {{#if view.isUpgradeFailed}}
+              <li {{bindAttr class="view.noActionAvailable"}}>
+                <a href="javascript:void(null)" data-toggle="modal" {{action "upgradeComponent" view.content target="controller"}}>
+                  {{t common.reUpgrade}}
+                </a>
+              </li>
+            {{/if}}
+            {{#if view.isInstallFailed}}
+              <li {{bindAttr class="view.noActionAvailable"}}>
+                <a href="javascript:void(null)" data-toggle="modal" {{action "installComponent" view.content target="controller"}}>
+                  {{t common.reinstall}}
                 </a>
               </li>
             {{/if}}
-            {{#unless view.isStart}}
-              {{#unless view.isInit}}
-                <li {{bindAttr class="view.isUpgradeFailed:hidden view.isInstallFailed:hidden view.isDecommissioning:hidden view.noActionAvailable"}}>
-                  <a href="javascript:void(null)" data-toggle="modal" {{action "startComponent" view.content target="controller"}}>
-                    {{t common.start}}
+            {{#if view.isReassignable}}
+              {{#isAuthorized "SERVICE.MOVE"}}
+                <li {{bindAttr class="view.noActionAvailable view.isMoveComponentDisabled:disabled"}}>
+                  <a href="javascript:void(null)" data-toggle="modal" {{action "moveComponent" view.content target="controller"}}>
+                    {{t common.move}}
                   </a>
                 </li>
-              {{/unless}}
-            {{/unless}}
-          {{/isAuthorized}}
-          {{#if view.isUpgradeFailed}}
-            <li {{bindAttr class="view.noActionAvailable"}}>
-              <a href="javascript:void(null)" data-toggle="modal" {{action "upgradeComponent" view.content target="controller"}}>
-                {{t common.reUpgrade}}
+              {{/isAuthorized}}
+            {{/if}}
+            {{#isAuthorized "HOST.TOGGLE_MAINTENANCE"}}
+              <li {{bindAttr class="view.noActionAvailable view.content.isImpliedState:disabled :allow-tooltip"}}
+                {{bindAttr data-original-title="view.maintenanceTooltip"}} rel="passiveTooltip" >
+              <a href="javascript:void(null)"
+                 data-toggle="modal" {{action "toggleMaintenanceMode" view.content target="controller"}}>
+                {{#if view.isActive}}
+                  {{t passiveState.turnOn}}
+                {{else}}
+                  {{t passiveState.turnOff}}
+                {{/if}}
               </a>
             </li>
-          {{/if}}
-          {{#if view.isInstallFailed}}
-            <li {{bindAttr class="view.noActionAvailable"}}>
+            {{/isAuthorized}}
+          {{/unless}}
+          {{#if view.isInit}}
+            <li>
               <a href="javascript:void(null)" data-toggle="modal" {{action "installComponent" view.content target="controller"}}>
                 {{t common.reinstall}}
               </a>
             </li>
           {{/if}}
-          {{#if view.isReassignable}}
-            {{#isAuthorized "SERVICE.MOVE"}}
-              <li {{bindAttr class="view.noActionAvailable view.isMoveComponentDisabled:disabled"}}>
-                <a href="javascript:void(null)" data-toggle="modal" {{action "moveComponent" view.content target="controller"}}>
-                  {{t common.move}}
-                </a>
+          {{#if view.isDeletableComponent}}
+            {{#isAuthorized "HOST.ADD_DELETE_COMPONENTS"}}
+              <li {{bindAttr class="view.isDeleteComponentDisabled:disabled"}}>
+                  <a href="javascript:void(null)" data-toggle="modal" {{action "deleteComponent" view.content target="controller"}}>
+                    {{t common.delete}}
+                  </a>
               </li>
             {{/isAuthorized}}
           {{/if}}
-          {{#isAuthorized "HOST.TOGGLE_MAINTENANCE"}}
-            <li {{bindAttr class="view.noActionAvailable view.content.isImpliedState:disabled :allow-tooltip"}}
-              {{bindAttr data-original-title="view.maintenanceTooltip"}} rel="passiveTooltip" >
-            <a href="javascript:void(null)"
-               data-toggle="modal" {{action "toggleMaintenanceMode" view.content target="controller"}}>
-              {{#if view.isActive}}
-                {{t passiveState.turnOn}}
-              {{else}}
-                {{t passiveState.turnOff}}
-              {{/if}}
-            </a>
-          </li>
-          {{/isAuthorized}}
-        {{/unless}}
-        {{#if view.isInit}}
-          <li>
-            <a href="javascript:void(null)" data-toggle="modal" {{action "installComponent" view.content target="controller"}}>
-              {{t common.reinstall}}
-            </a>
-          </li>
-        {{/if}}
-        {{#if view.isDeletableComponent}}
-          {{#isAuthorized "HOST.ADD_DELETE_COMPONENTS"}}
-            <li {{bindAttr class="view.isDeleteComponentDisabled:disabled"}}>
-                <a href="javascript:void(null)" data-toggle="modal" {{action "deleteComponent" view.content target="controller"}}>
-                  {{t common.delete}}
-                </a>
-            </li>
-          {{/isAuthorized}}
-        {{/if}}
-        {{#if view.isRefreshConfigsAllowed}}
-            <li>
-                <a href="javascript:void(null)" data-toggle="modal" {{action "refreshComponentConfigs" view.content target="controller"}}>
-                  {{t hosts.host.details.refreshConfigs}}
-                </a>
-            </li>
-        {{/if}}
+          {{#if view.isRefreshConfigsAllowed}}
+              <li>
+                  <a href="javascript:void(null)" data-toggle="modal" {{action "refreshComponentConfigs" view.content target="controller"}}>
+                    {{t hosts.host.details.refreshConfigs}}
+                  </a>
+              </li>
+          {{/if}}
 
-      {{#each command in view.customCommands}}
-        <li {{bindAttr class="command.disabled:disabled"}}>
-          <a href="javascript:void(null)" {{action "executeCustomCommand" command target="controller" href=true}}>{{command.label}}</a>
-        </li>
-      {{/each}}
-      </ul>
+        {{#each command in view.customCommands}}
+          <li {{bindAttr class="command.disabled:disabled"}}>
+            <a href="javascript:void(null)" {{action "executeCustomCommand" command target="controller" href=true}}>{{command.label}}</a>
+          </li>
+        {{/each}}
+        </ul>
+      {{/unless}}
     </div>
-  {{/isAuthorized}}
+  {{/havePermissions}}
 </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/app/templates/main/host/stack_versions.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/host/stack_versions.hbs b/ambari-web/app/templates/main/host/stack_versions.hbs
index 60e6571..cf167a4 100644
--- a/ambari-web/app/templates/main/host/stack_versions.hbs
+++ b/ambari-web/app/templates/main/host/stack_versions.hbs
@@ -54,9 +54,9 @@
           {{/if}}
         </td>
         <td class="install-repo-version align-center">
-          {{#isAuthorized "AMBARI.MANAGE_STACK_VERSIONS"}}
+          {{#havePermissions "AMBARI.MANAGE_STACK_VERSIONS"}}
             <button class="btn btn-default" {{action installVersionConfirmation version target="controller"}} {{bindAttr disabled="version.installDisabled"}}><i class="glyphicon glyphicon-off"></i>&nbsp;{{t common.install}}</button>
-          {{/isAuthorized}}
+          {{/havePermissions}}
         </td>
       </tr>
     {{/each}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/app/templates/main/host/summary.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/host/summary.hbs b/ambari-web/app/templates/main/host/summary.hbs
index 84a1c5e..3ceb92f 100644
--- a/ambari-web/app/templates/main/host/summary.hbs
+++ b/ambari-web/app/templates/main/host/summary.hbs
@@ -26,38 +26,40 @@
             <h4 class="panel-title">{{t common.components}}</h4>
           </div>
           <div class="col-md-6">
-            {{#isAuthorized "HOST.ADD_DELETE_COMPONENTS"}}
+            {{#havePermissions "HOST.ADD_DELETE_COMPONENTS"}}
               <div class="dropdown pull-right">
-                <button id="add_component" data-toggle="dropdown" {{bindAttr class="view.addComponentDisabled:disabled :btn :btn-default :dropdown-toggle"}}>
+                <button id="add_component" data-toggle="dropdown" {{bindAttr class="view.addComponentDisabled:disabled :btn :btn-default :dropdown-toggle" disabled="App.router.wizardWatcherController.isNonWizardUser"}}>
                   <span class="glyphicon glyphicon-plus"></span>&nbsp;{{t add}}
                 </button>
-                <ul class="dropdown-menu">
-                  {{#each component in view.addableComponents}}
-                    <li>
-                      <a href="javascript:void(null)" data-toggle="modal" {{action addComponentWithCheck component target="controller"}}>
-                        {{component.displayName}}
-                      </a>
-                    </li>
-                  {{/each}}
-                </ul>
+                {{#unless App.router.wizardWatcherController.isNonWizardUser}}
+                  <ul class="dropdown-menu">
+                    {{#each component in view.addableComponents}}
+                      <li>
+                        <a href="javascript:void(null)" data-toggle="modal" {{action addComponentWithCheck component target="controller"}}>
+                          {{component.displayName}}
+                        </a>
+                      </li>
+                    {{/each}}
+                  </ul>
+                {{/unless}}
               </div>
-            {{/isAuthorized}}
+            {{/havePermissions}}
           </div>
         </div>
       </div>
       <div class="host-components panel-body">
           {{#if view.sortedComponents.length}}
 
-              {{#isAuthorized "SERVICE.MODIFY_CONFIGS, SERVICE.START_STOP, HOST.ADD_DELETE_COMPONENTS, HOST.TOGGLE_MAINTENANCE"}}
+              {{#havePermissions "SERVICE.MODIFY_CONFIGS, SERVICE.START_STOP, HOST.ADD_DELETE_COMPONENTS, HOST.TOGGLE_MAINTENANCE"}}
                   {{#if view.content.componentsWithStaleConfigsCount}}
                     <div class="alert alert-warning clearfix restart-required">
                       <i class="glyphicon glyphicon-refresh"></i> {{view.needToRestartMessage}}
-                      <button {{bindAttr class=":btn :restart-components :pull-right :btn-warning"}} {{action restartAllStaleConfigComponents target="controller"}}>
+                      <button {{bindAttr class=":btn :restart-components :pull-right :btn-warning" disabled="App.router.wizardWatcherController.isNonWizardUser"}} {{action restartAllStaleConfigComponents target="controller"}}>
                           {{t hosts.host.details.needToRestart.button}}
                       </button>
                     </div>
                   {{/if}}
-              {{/isAuthorized}}
+              {{/havePermissions}}
 
               {{#each component in view.sortedComponents}}
                 {{view component.viewClass classNames="row row-no-pad" contentBinding="component"}}
@@ -89,58 +91,60 @@
               {{/if}}
           </div>
           <div class="col-md-5 col-lg-4 pull-right">
-              {{#isAuthorized "SERVICE.MODIFY_CONFIGS, SERVICE.START_STOP, HOST.ADD_DELETE_COMPONENTS, HOST.TOGGLE_MAINTENANCE"}}
+              {{#havePermissions "SERVICE.MODIFY_CONFIGS, SERVICE.START_STOP, HOST.ADD_DELETE_COMPONENTS, HOST.TOGGLE_MAINTENANCE"}}
                   {{#if view.clients.length}}
                     <div class="dropdown">
                       <button id="add_component"
-                              data-toggle="dropdown" {{bindAttr class=":btn :btn-default :btn-block :dropdown-toggle controller.content.isNotHeartBeating:disabled"}}>
+                              data-toggle="dropdown" {{bindAttr disabled="App.router.wizardWatcherController.isNonWizardUser" class=":btn :btn-default :btn-block :dropdown-toggle controller.content.isNotHeartBeating:disabled"}}>
                           {{t common.installed}}
                         <span class="caret pull-right button-caret-margin"></span>
                       </button>
-                      <ul class="dropdown-menu">
-                        <li>
-                          <a href="javascript:void(null)"
-                             data-toggle="modal" {{action refreshConfigs view.clients target="controller"}}>
-                              {{t hosts.host.details.refreshConfigs}}
-                          </a>
-                        </li>
-                        <li>
-                          <a href="javascript:void(null)" {{bindAttr class="view.areClientsNotInstalled::disabled" }}
-                             data-toggle="modal" {{action installClients target="view"}}>
-                              {{t host.host.details.installClients}}
-                          </a>
-                        </li>
-                          {{#if view.anyClientFailedToInstall}}
-                            <li>
-                              <a href="javascript:void(null)" {{action reinstallClients target="view"}}>
-                                  {{t host.host.details.reinstallClients}}
-                              </a>
-                            </li>
-                          {{/if}}
-                          {{#each option in view.clientsWithCustomCommands}}
-                            <li class="dropdown-submenu submenu-left">
-                              <a href="javascript:void(null)">
-                                <i class="glyphicon glyphicon-play-circle"></i>
-                                  {{option.label}}
-                              </a>
+                      {{#unless App.router.wizardWatcherController.isNonWizardUser}}
+                        <ul class="dropdown-menu">
+                          <li>
+                            <a href="javascript:void(null)"
+                               data-toggle="modal" {{action refreshConfigs view.clients target="controller"}}>
+                                {{t hosts.host.details.refreshConfigs}}
+                            </a>
+                          </li>
+                          <li>
+                            <a href="javascript:void(null)" {{bindAttr class="view.areClientsNotInstalled::disabled" }}
+                               data-toggle="modal" {{action installClients target="view"}}>
+                                {{t host.host.details.installClients}}
+                            </a>
+                          </li>
+                            {{#if view.anyClientFailedToInstall}}
+                              <li>
+                                <a href="javascript:void(null)" {{action reinstallClients target="view"}}>
+                                    {{t host.host.details.reinstallClients}}
+                                </a>
+                              </li>
+                            {{/if}}
+                            {{#each option in view.clientsWithCustomCommands}}
+                              <li class="dropdown-submenu submenu-left">
+                                <a href="javascript:void(null)">
+                                  <i class="glyphicon glyphicon-play-circle"></i>
+                                    {{option.label}}
+                                </a>
 
-                              <div class="dropdown-menu-wrap">
-                                <ul class="dropdown-menu">
-                                    {{#each command in option.commands}}
-                                      <li>
-                                        <a href="javascript:void(null)" {{action "executeCustomCommand" command target="controller" href=true}}>
-                                          {{command.label}}
-                                        </a>
-                                      </li>
-                                    {{/each}}
-                                </ul>
-                              </div>
-                            </li>
-                          {{/each}}
-                      </ul>
+                                <div class="dropdown-menu-wrap">
+                                  <ul class="dropdown-menu">
+                                      {{#each command in option.commands}}
+                                        <li>
+                                          <a href="javascript:void(null)" {{action "executeCustomCommand" command target="controller" href=true}}>
+                                            {{command.label}}
+                                          </a>
+                                        </li>
+                                      {{/each}}
+                                  </ul>
+                                </div>
+                              </li>
+                            {{/each}}
+                        </ul>
+                      {{/unless}}
                     </div>
                   {{/if}}
-              {{/isAuthorized}}
+              {{/havePermissions}}
           </div>
         </div>
       </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/app/utils/helper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/helper.js b/ambari-web/app/utils/helper.js
index 3bc247c..baacdf3 100644
--- a/ambari-web/app/utils/helper.js
+++ b/ambari-web/app/utils/helper.js
@@ -445,6 +445,29 @@ Em.Handlebars.registerHelper('isAuthorized', function (property, options) {
 });
 
 /**
+ * Usage:
+ *
+ * <pre>
+ *   {{#havePermissions "SERVICE.TOGGLE_ALERTS"}}
+ *     {{! some truly code }}
+ *   {{else}}
+ *     {{! some falsy code }}
+ *   {{/havePermissions}}
+ * </pre>
+ */
+Em.Handlebars.registerHelper('havePermissions', function (property, options) {
+  var permission = Ember.Object.create({
+    havePermissions: function() {
+      return App.havePermissions(property);
+    }.property()
+  });
+
+  // wipe out contexts so boundIf uses `this` (the permission) as the context
+  options.contexts = null;
+  return Ember.Handlebars.helpers.boundIf.call(permission, "havePermissions", options);
+});
+
+/**
  * @namespace App
  */
 App = require('app');

http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/app/views/main/service/item.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/item.js b/ambari-web/app/views/main/service/item.js
index 45c783b..945dc8f 100644
--- a/ambari-web/app/views/main/service/item.js
+++ b/ambari-web/app/views/main/service/item.js
@@ -319,7 +319,7 @@ App.MainServiceItemView = Em.View.extend({
   }.property('maintenance'),
 
   hasConfigTab: function() {
-    return App.isAuthorized('CLUSTER.VIEW_CONFIGS', {ignoreWizard: true}) && !App.get('services.noConfigTypes').contains(this.get('controller.content.serviceName'));
+    return App.havePermissions('CLUSTER.VIEW_CONFIGS') && !App.get('services.noConfigTypes').contains(this.get('controller.content.serviceName'));
   }.property('controller.content.serviceName','App.services.noConfigTypes'),
 
   hasHeatmapTab: function() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/84342f6a/ambari-web/test/views/main/service/item_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/service/item_test.js b/ambari-web/test/views/main/service/item_test.js
index e4a1940..b86d021 100644
--- a/ambari-web/test/views/main/service/item_test.js
+++ b/ambari-web/test/views/main/service/item_test.js
@@ -614,7 +614,7 @@ describe('App.MainServiceItemView', function () {
 
   describe('#hasConfigTab', function() {
     beforeEach(function() {
-      this.mockAuthorized = sinon.stub(App, 'isAuthorized');
+      this.mockAuthorized = sinon.stub(App, 'havePermissions');
       this.mockGet = sinon.stub(App, 'get').returns(['S2']);
     });
     afterEach(function() {
@@ -622,7 +622,7 @@ describe('App.MainServiceItemView', function () {
       this.mockGet.restore();
     });
 
-    it('should return false when not authorized', function() {
+    it('should return false when have not permissions', function() {
       this.mockAuthorized.returns(false);
       view.set('controller.content.serviceName', 'S1');
       expect(view.get('hasConfigTab')).to.be.false;
@@ -634,7 +634,7 @@ describe('App.MainServiceItemView', function () {
       expect(view.get('hasConfigTab')).to.be.false;
     });
 
-    it('should return true when authorized', function() {
+    it('should return true when have permissions', function() {
       this.mockAuthorized.returns(true);
       view.set('controller.content.serviceName', 'S1');
       expect(view.get('hasConfigTab')).to.be.true;


[43/50] [abbrv] ambari git commit: Revert "AMBARI-22258. Use correct property attribute setter fn(). while setting 'tez.runtime.io.sort.mb' maximum value."

Posted by ao...@apache.org.
Revert "AMBARI-22258. Use correct property attribute setter fn(). while setting 'tez.runtime.io.sort.mb' maximum value."

This reverts commit b9f26708dd19c444918ea9b21150f66236fcdf2d.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/37d59f28
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/37d59f28
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/37d59f28

Branch: refs/heads/branch-3.0-perf
Commit: 37d59f281e682c17e0a34aa7e6d9a09c927febca
Parents: c9c96cd
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Wed Oct 18 11:15:12 2017 -0700
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Wed Oct 18 11:15:12 2017 -0700

----------------------------------------------------------------------
 .../src/main/resources/stacks/HDP/2.5/services/stack_advisor.py | 3 +--
 .../src/test/python/stacks/2.5/common/test_stack_advisor.py     | 5 +----
 2 files changed, 2 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/37d59f28/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index b6f2478..92ce9b9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -860,7 +860,6 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
     putHiveInteractiveEnvProperty = self.putProperty(configurations, "hive-interactive-env", services)
     putHiveInteractiveEnvPropertyAttribute = self.putPropertyAttribute(configurations, "hive-interactive-env")
     putTezInteractiveSiteProperty = self.putProperty(configurations, "tez-interactive-site", services)
-    putTezInteractiveSitePropertyAttribute = self.putPropertyAttribute(configurations, "tez-interactive-site")
     llap_daemon_selected_queue_name = None
     selected_queue_is_ambari_managed_llap = None  # Queue named 'llap' at root level is Ambari managed.
     llap_selected_queue_am_percent = None
@@ -1332,7 +1331,7 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
     putTezInteractiveSiteProperty('tez.runtime.io.sort.mb', tez_runtime_io_sort_mb)
     if "tez-site" in services["configurations"] and "tez.runtime.sorter.class" in services["configurations"]["tez-site"]["properties"]:
       if services["configurations"]["tez-site"]["properties"]["tez.runtime.sorter.class"] == "LEGACY":
-        putTezInteractiveSitePropertyAttribute("tez.runtime.io.sort.mb", "maximum", 1800)
+        putTezInteractiveSiteProperty("tez.runtime.io.sort.mb", "maximum", 1800)
 
     putTezInteractiveSiteProperty('tez.runtime.unordered.output.buffer.size-mb', tez_runtime_unordered_output_buffer_size)
     putHiveInteractiveSiteProperty('hive.auto.convert.join.noconditionaltask.size', hive_auto_convert_join_noconditionaltask_size)

http://git-wip-us.apache.org/repos/asf/ambari/blob/37d59f28/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
index 407e78d..cf462de 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
@@ -4446,8 +4446,7 @@ class TestHDP25StackAdvisor(TestCase):
           },
         "tez-site": {
           "properties": {
-            "tez.am.resource.memory.mb": "1024",
-            "tez.runtime.sorter.class": "LEGACY"
+            "tez.am.resource.memory.mb": "1024"
           }
         },
       }
@@ -4482,8 +4481,6 @@ class TestHDP25StackAdvisor(TestCase):
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.io.memory.size'], '186368')
     self.assertEqual(configurations['hive-interactive-env']['properties']['llap_heap_size'], '9830')
-    self.assertEqual(configurations['tez-interactive-site']['properties']['tez.runtime.io.sort.mb'], '1092')
-    self.assertEquals(configurations['tez-interactive-site']['property_attributes']['tez.runtime.io.sort.mb'], {'maximum': '1800'})
 
 
 


[31/50] [abbrv] ambari git commit: AMBARI-22248. HBase default.rootdir config results in deploy failure if value is not overridden. (swagle)

Posted by ao...@apache.org.
AMBARI-22248. HBase default.rootdir config results in deploy failure if value is not overridden. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e19029cb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e19029cb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e19029cb

Branch: refs/heads/branch-3.0-perf
Commit: e19029cbd419fc6512350c365e43faff60d4acc9
Parents: 79d4ac2
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Mon Oct 16 17:57:42 2017 -0700
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Mon Oct 16 17:57:42 2017 -0700

----------------------------------------------------------------------
 .../HBASE/0.96.0.2.0/configuration/hbase-site.xml  | 17 +++++++++--------
 1 file changed, 9 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e19029cb/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-site.xml b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-site.xml
index 54ab35d..a48aad4 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-site.xml
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-site.xml
@@ -22,15 +22,16 @@
 <configuration supports_final="true">
   <property>
     <name>hbase.rootdir</name>
-    <value>hdfs://localhost:8020/apps/hbase/data</value>
+    <value>/apps/hbase/data</value>
     <description>The directory shared by region servers and into
-    which HBase persists.  The URL should be 'fully-qualified'
-    to include the filesystem scheme.  For example, to specify the
-    HDFS directory '/hbase' where the HDFS instance's namenode is
-    running at namenode.example.org on port 9000, set this value to:
-    hdfs://namenode.example.org:9000/hbase.  By default HBase writes
-    into /tmp.  Change this configuration else all data will be lost
-    on machine restart.
+      which HBase persists.  The URL should be 'fully-qualified'
+      to include the filesystem scheme.  For example, to specify the
+      HDFS directory '/hbase' where the HDFS instance's namenode is
+      running at namenode.example.org on port 9000, set this value to:
+      hdfs://namenode.example.org:9000/hbase.  By default HBase writes
+      into /tmp.  Change this configuration else all data will be lost
+      on machine restart. Ambari sets this a scheme less value which defaults to
+      a HDFS relative path.
     </description>
     <on-ambari-upgrade add="false"/>
   </property>


[46/50] [abbrv] ambari git commit: AMBARI-22268. Remove "Hotfix" from Admin Register Version. (Ishan Bhatt via yusaku)

Posted by ao...@apache.org.
AMBARI-22268. Remove "Hotfix" from Admin Register Version. (Ishan Bhatt via yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2ae81d99
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2ae81d99
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2ae81d99

Branch: refs/heads/branch-3.0-perf
Commit: 2ae81d99a55756bd5240a90d550d34ec2ff73927
Parents: 8852f33
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Wed Oct 18 15:12:07 2017 -0700
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Wed Oct 18 15:12:07 2017 -0700

----------------------------------------------------------------------
 .../src/main/resources/ui/admin-web/app/scripts/i18n.config.js     | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2ae81d99/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
index 1711543..183a276 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
@@ -344,7 +344,7 @@ angular.module('ambariAdminConsole')
     'users.alerts.usersEffectivePrivilege': '{{user_name}}\'s effective privilege through its Group(s) is higher than your selected privilege.',
 
     'versions.current': 'Current',
-    'versions.addVersion': 'Add Version or Hotfix',
+    'versions.addVersion': 'Add Version',
     'versions.defaultVersion': '(Default Version Definition)',
     'versions.inUse': 'In Use',
     'versions.installed': 'Installed',


[08/50] [abbrv] ambari git commit: AMBARI-22221. Fix initialization of Druid Script object. (nishantmonu51)

Posted by ao...@apache.org.
AMBARI-22221. Fix initialization of Druid Script object. (nishantmonu51)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d88d3cc0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d88d3cc0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d88d3cc0

Branch: refs/heads/branch-3.0-perf
Commit: d88d3cc06f5355802a95db35c26e383502b658c7
Parents: 527e45d
Author: Nishant <ni...@gmail.com>
Authored: Thu Oct 12 21:20:11 2017 +0530
Committer: Nishant <ni...@gmail.com>
Committed: Thu Oct 12 22:37:26 2017 +0530

----------------------------------------------------------------------
 .../common-services/DRUID/0.10.1/package/scripts/druid_node.py      | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d88d3cc0/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/druid_node.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/druid_node.py b/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/druid_node.py
index 8053dcb..8e1a44f 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/druid_node.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/druid_node.py
@@ -32,6 +32,7 @@ from druid import druid, get_daemon_cmd, getPid
 
 class DruidBase(Script):
   def __init__(self, nodeType=None):
+    Script.__init__(self)
     self.nodeType = nodeType
 
   def install(self, env):


[33/50] [abbrv] ambari git commit: AMBARI-22246 Restart HDFS/JOURNALNODE failed on revert patch upgrade (dgrinenko)

Posted by ao...@apache.org.
AMBARI-22246 Restart HDFS/JOURNALNODE failed on revert patch upgrade (dgrinenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ce29f8ce
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ce29f8ce
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ce29f8ce

Branch: refs/heads/branch-3.0-perf
Commit: ce29f8ce7762ed6c6d5c2f50d882b6e03b23acfc
Parents: d6cd303
Author: Dmytro Grinenko <ha...@apache.org>
Authored: Tue Oct 17 16:53:47 2017 +0300
Committer: Dmytro Grinenko <ha...@apache.org>
Committed: Tue Oct 17 16:53:47 2017 +0300

----------------------------------------------------------------------
 .../ambari/server/state/UpgradeContext.java     | 55 ++++++++++++++------
 .../ambari/server/state/UpgradeContextTest.java | 38 +++++++++++++-
 2 files changed, 75 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ce29f8ce/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
index de0f868..cb44adb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
@@ -261,6 +261,38 @@ public class UpgradeContext {
    */
   private RepositoryType m_orchestration = RepositoryType.STANDARD;
 
+  /**
+   * Reading upgrade type from provided request  or if nothing were provided,
+   * from previous upgrade for downgrade direction.
+   *
+   * @param upgradeRequestMap arguments provided for current upgrade request
+   * @param upgradeEntity previous upgrade entity, should be passed only for downgrade direction
+   *
+   * @return
+   * @throws AmbariException
+   */
+  private UpgradeType calculateUpgradeType(Map<String, Object> upgradeRequestMap,
+                                           UpgradeEntity upgradeEntity) throws AmbariException{
+
+    UpgradeType upgradeType = UpgradeType.ROLLING;
+
+    String upgradeTypeProperty = (String) upgradeRequestMap.get(UPGRADE_TYPE);
+    boolean upgradeTypePassed = StringUtils.isNotBlank(upgradeTypeProperty);
+
+    if (upgradeTypePassed){
+      try {
+        upgradeType = UpgradeType.valueOf(upgradeRequestMap.get(UPGRADE_TYPE).toString());
+      } catch (Exception e) {
+        throw new AmbariException(String.format("Property %s has an incorrect value of %s.",
+          UPGRADE_TYPE, upgradeTypeProperty));
+      }
+    } else if (upgradeEntity != null){
+      upgradeType = upgradeEntity.getUpgradeType();
+    }
+
+    return upgradeType;
+  }
+
   @AssistedInject
   public UpgradeContext(@Assisted Cluster cluster,
       @Assisted Map<String, Object> upgradeRequestMap, Gson gson, UpgradeHelper upgradeHelper,
@@ -271,23 +303,7 @@ public class UpgradeContext {
     m_upgradeHelper = upgradeHelper;
     m_upgradeDAO = upgradeDAO;
     m_repoVersionDAO = repoVersionDAO;
-
     m_cluster = cluster;
-
-    // determine upgrade type (default is ROLLING)
-    String upgradeTypeProperty = (String) upgradeRequestMap.get(UPGRADE_TYPE);
-    if (StringUtils.isNotBlank(upgradeTypeProperty)) {
-      try {
-        m_type = UpgradeType.valueOf(upgradeRequestMap.get(UPGRADE_TYPE).toString());
-      } catch (Exception e) {
-        throw new AmbariException(String.format("Property %s has an incorrect value of %s.",
-            UPGRADE_TYPE, upgradeTypeProperty));
-      }
-    } else {
-      // default type
-      m_type= UpgradeType.ROLLING;
-    }
-
     m_isRevert = upgradeRequestMap.containsKey(UPGRADE_REVERT_UPGRADE_ID);
 
     if (m_isRevert) {
@@ -316,13 +332,15 @@ public class UpgradeContext {
             "Only successfully completed upgrades can be reverted. Downgrades cannot be reverted.");
       }
 
-      if (revertableUpgrade.getId() != revertUpgrade.getId()) {
+      if (!revertableUpgrade.getId().equals(revertUpgrade.getId())) {
         throw new AmbariException(String.format(
             "The only upgrade which is currently allowed to be reverted for cluster %s is upgrade ID %s which was an upgrade to %s",
             cluster.getClusterName(), revertableUpgrade.getId(),
             revertableUpgrade.getRepositoryVersion().getVersion()));
       }
 
+      m_type = calculateUpgradeType(upgradeRequestMap, revertUpgrade);
+
       // !!! build all service-specific reversions
       Set<RepositoryVersionEntity> priors = new HashSet<>();
       Map<String, Service> clusterServices = cluster.getServices();
@@ -382,6 +400,8 @@ public class UpgradeContext {
                     UPGRADE_REPO_VERSION_ID, m_direction));
           }
 
+          m_type = calculateUpgradeType(upgradeRequestMap, null);
+
           // depending on the repository, add services
           m_repositoryVersion = m_repoVersionDAO.findByPK(Long.valueOf(repositoryVersionId));
           m_orchestration = m_repositoryVersion.getType();
@@ -396,6 +416,7 @@ public class UpgradeContext {
 
           m_repositoryVersion = upgrade.getRepositoryVersion();
           m_orchestration = upgrade.getOrchestration();
+          m_type = calculateUpgradeType(upgradeRequestMap, upgrade);
 
           // populate the repository maps for all services in the upgrade
           for (UpgradeHistoryEntity history : upgrade.getHistory()) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/ce29f8ce/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeContextTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeContextTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeContextTest.java
index 5176ffe..b38b272 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeContextTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeContextTest.java
@@ -317,7 +317,6 @@ public class UpgradeContextTest extends EasyMockSupport {
         EasyMock.anyObject(StackId.class), EasyMock.anyObject(Direction.class),
         EasyMock.anyObject(UpgradeType.class), EasyMock.anyString())).andReturn(upgradePack).once();
 
-
     expect(m_upgradeDAO.findRevertable(1L)).andReturn(m_completedRevertableUpgrade).once();
 
     Map<String, Object> requestMap = new HashMap<>();
@@ -337,6 +336,43 @@ public class UpgradeContextTest extends EasyMockSupport {
     verifyAll();
   }
 
+
+  /**
+   * Tests that the {@link UpgradeContext} for an EU reversion has the correct
+   * parameters set.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testRevertEU() throws Exception {
+    ConfigHelper configHelper = createNiceMock(ConfigHelper.class);
+    UpgradeHelper upgradeHelper = createNiceMock(UpgradeHelper.class);
+    UpgradePack upgradePack = createNiceMock(UpgradePack.class);
+
+    expect(upgradeHelper.suggestUpgradePack(EasyMock.anyString(), EasyMock.anyObject(StackId.class),
+      EasyMock.anyObject(StackId.class), EasyMock.anyObject(Direction.class),
+      EasyMock.anyObject(UpgradeType.class), EasyMock.anyString())).andReturn(upgradePack).once();
+
+    expect(m_upgradeDAO.findRevertable(1L)).andReturn(m_completedRevertableUpgrade).once();
+    expect(m_completedRevertableUpgrade.getUpgradeType()).andReturn(UpgradeType.NON_ROLLING);
+
+    Map<String, Object> requestMap = new HashMap<>();
+    requestMap.put(UpgradeResourceProvider.UPGRADE_REVERT_UPGRADE_ID, "1");
+
+    replayAll();
+
+    UpgradeContext context = new UpgradeContext(m_cluster, requestMap, null, upgradeHelper,
+      m_upgradeDAO, m_repositoryVersionDAO, configHelper);
+
+    assertEquals(Direction.DOWNGRADE, context.getDirection());
+    assertEquals(RepositoryType.PATCH, context.getOrchestrationType());
+    assertEquals(UpgradeType.NON_ROLLING, context.getType());
+    assertEquals(1, context.getSupportedServices().size());
+    assertTrue(context.isPatchRevert());
+
+    verifyAll();
+  }
+
   /**
    * Tests that the {@link UpgradeContext} for a reversion has the correct
    * services included in the reversion if one of the services in the original


[32/50] [abbrv] ambari git commit: AMBARI-22234 Optimizing Ranger KMS imports (mugdha)

Posted by ao...@apache.org.
AMBARI-22234 Optimizing Ranger KMS imports (mugdha)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d6cd303d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d6cd303d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d6cd303d

Branch: refs/heads/branch-3.0-perf
Commit: d6cd303d0207ac6ffcdbc16d129758bba9494423
Parents: e19029c
Author: Mugdha Varadkar <mu...@apache.org>
Authored: Tue Oct 17 11:22:05 2017 +0530
Committer: Mugdha Varadkar <mu...@apache.org>
Committed: Tue Oct 17 11:24:13 2017 +0530

----------------------------------------------------------------------
 .../0.5.0.2.3/package/scripts/kms_server.py     | 21 ++++++++++----------
 .../1.0.0.3.0/package/scripts/kms_server.py     | 19 +++++++++---------
 2 files changed, 21 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d6cd303d/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py
index fcf2478..0b37489 100755
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py
@@ -27,9 +27,10 @@ from resource_management.libraries.functions.format import format
 from resource_management.core.logger import Logger
 from resource_management.core import shell
 from resource_management.libraries.functions.default import default
-from kms import kms, setup_kms_db, setup_java_patch, enable_kms_plugin, setup_kms_jce, update_password_configs
 from kms_service import kms_service
 
+import kms
+
 class KmsServer(Script):
 
   def install(self, env):
@@ -44,9 +45,9 @@ class KmsServer(Script):
       sudo = True
     )
 
-    setup_kms_db()
+    kms.setup_kms_db()
     self.configure(env)
-    setup_java_patch()
+    kms.setup_java_patch()
 
   def stop(self, env, upgrade_type=None):
     import params
@@ -63,9 +64,9 @@ class KmsServer(Script):
 
     env.set_params(params)
     self.configure(env)
-    enable_kms_plugin()
-    setup_kms_jce()
-    update_password_configs()
+    kms.enable_kms_plugin()
+    kms.setup_kms_jce()
+    kms.update_password_configs()
     kms_service(action = 'start', upgrade_type=upgrade_type)
 
   def status(self, env):
@@ -87,15 +88,15 @@ class KmsServer(Script):
     import params
 
     env.set_params(params)
-    kms()
+    kms.kms()
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     stack_select.select_packages(params.version)
-    kms(upgrade_type=upgrade_type)
-    setup_java_patch()
+    kms.kms(upgrade_type=upgrade_type)
+    kms.setup_java_patch()
 
   def setup_ranger_kms_database(self, env):
     import params
@@ -107,7 +108,7 @@ class KmsServer(Script):
 
     stack_version = upgrade_stack[1]
     Logger.info(format('Setting Ranger KMS database schema, using version {stack_version}'))
-    setup_kms_db(stack_version=stack_version)
+    kms.setup_kms_db(stack_version=stack_version)
     
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6cd303d/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/package/scripts/kms_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/package/scripts/kms_server.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/package/scripts/kms_server.py
index 4c313c4..4708da1 100755
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/package/scripts/kms_server.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/package/scripts/kms_server.py
@@ -27,10 +27,11 @@ from resource_management.libraries.functions.format import format
 from resource_management.core.logger import Logger
 from resource_management.core import shell
 from resource_management.libraries.functions.default import default
-from kms import kms, setup_kms_db, setup_java_patch, enable_kms_plugin, setup_kms_jce
 from kms_service import kms_service
 import upgrade
 
+import kms
+
 class KmsServer(Script):
 
   def install(self, env):
@@ -38,9 +39,9 @@ class KmsServer(Script):
     import params
     env.set_params(params)
 
-    setup_kms_db()
+    kms.setup_kms_db()
     self.configure(env)
-    setup_java_patch()
+    kms.setup_java_patch()
 
   def stop(self, env, upgrade_type=None):
     import params
@@ -57,8 +58,8 @@ class KmsServer(Script):
 
     env.set_params(params)
     self.configure(env)
-    enable_kms_plugin()
-    setup_kms_jce()
+    kms.enable_kms_plugin()
+    kms.setup_kms_jce()
     kms_service(action = 'start', upgrade_type=upgrade_type)
 
   def status(self, env):
@@ -80,15 +81,15 @@ class KmsServer(Script):
     import params
 
     env.set_params(params)
-    kms()
+    kms.kms()
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     upgrade.prestart(env)
-    kms(upgrade_type=upgrade_type)
-    setup_java_patch()
+    kms.kms(upgrade_type=upgrade_type)
+    kms.setup_java_patch()
 
   def setup_ranger_kms_database(self, env):
     import params
@@ -100,7 +101,7 @@ class KmsServer(Script):
 
     stack_version = upgrade_stack[1]
     Logger.info(format('Setting Ranger KMS database schema, using version {stack_version}'))
-    setup_kms_db(stack_version=stack_version)
+    kms.setup_kms_db(stack_version=stack_version)
 
   def get_log_folder(self):
     import params