You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by ao...@apache.org on 2017/06/06 10:59:35 UTC
[2/2] ambari git commit: AMBARI-21070. Race condition: webhdfs calls mkdir for /tmp/druid-indexing before /tmp is created, making /tmp not writable. (aonishuk)
AMBARI-21070. Race condition: webhdfs calls mkdir for /tmp/druid-indexing before /tmp is created, making /tmp not writable. (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f585da99
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f585da99
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f585da99
Branch: refs/heads/branch-2.5
Commit: f585da99ecad8b4d6a15b19688226a18a6fbc79f
Parents: be980bb
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Jun 6 13:59:32 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Jun 6 13:59:32 2017 +0300
----------------------------------------------------------------------
.../DRUID/0.9.2/package/scripts/druid.py | 7 ++++++
.../DRUID/0.9.2/package/scripts/params.py | 1 +
.../test/python/stacks/2.6/DRUID/test_druid.py | 23 +++++++++++++++++++-
.../test/python/stacks/2.6/configs/default.json | 3 ++-
4 files changed, 32 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/f585da99/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py
index 18febeb..cc02519 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py
@@ -159,6 +159,13 @@ def ensure_hadoop_directories():
# Create HadoopIndexTask hadoopWorkingPath
hadoop_working_path = druid_middlemanager_config['druid.indexer.task.hadoopWorkingPath']
if hadoop_working_path is not None:
+ if hadoop_working_path.startswith(params.hdfs_tmp_dir):
+ params.HdfsResource(params.hdfs_tmp_dir,
+ type="directory",
+ action="create_on_execute",
+ owner=params.hdfs_user,
+ mode=0777,
+ )
create_hadoop_directory(hadoop_working_path)
# If HDFS is used for storing logs, create Index Task log directory
http://git-wip-us.apache.org/repos/asf/ambari/blob/f585da99/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
index b1d9472..d47e604 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
@@ -125,6 +125,7 @@ hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name',
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
dfs_type = default("/commandParams/dfs_type", "")
+hdfs_tmp_dir = config['configurations']['hadoop-env']['hdfs_tmp_dir']
# Kerberos
druid_principal_name = default('/configurations/druid-common/druid.hadoop.security.kerberos.principal',
http://git-wip-us.apache.org/repos/asf/ambari/blob/f585da99/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py b/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py
index f35fd8e..41b0bf5 100644
--- a/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py
+++ b/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py
@@ -622,7 +622,28 @@ class TestDruid(RMFTestCase):
dfs_type = '',
mode=0755
)
-
+ self.assertResourceCalled('HdfsResource', '/tmp',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+ keytab = UnknownConfigurationMock(),
+ dfs_type = '',
+ default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+ hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+ hdfs_site = {u'a': u'b'},
+ kinit_path_local = '/usr/bin/kinit',
+ principal_name = 'missing_principal',
+ user = 'hdfs',
+ owner = 'hdfs',
+ hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ immutable_paths = [u'/apps/hive/warehouse',
+ u'/apps/falcon',
+ u'/mr-history/done',
+ u'/app-logs',
+ u'/tmp'],
+ mode = 0777,
+ )
self.assertResourceCalled('HdfsResource', '/tmp/druid-indexing',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
http://git-wip-us.apache.org/repos/asf/ambari/blob/f585da99/ambari-server/src/test/python/stacks/2.6/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/configs/default.json b/ambari-server/src/test/python/stacks/2.6/configs/default.json
index 54000e6..2570657 100644
--- a/ambari-server/src/test/python/stacks/2.6/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.6/configs/default.json
@@ -54,7 +54,8 @@
"content": "\n #\n #\n # Licensed to the Apache Software Foundation (ASF) under one\n # or more contributor license agreements. See the NOTICE file\n # distributed with this work for additional information\n # regarding copyright ownership. The ASF licenses this file\n # to you under the Apache License, Version 2.0 (the\n # \"License\"); you may not use this file except in compliance\n # with the License. You may obtain a copy of the License at\n #\n # http://www.apache.org/licenses/LICENSE-2.0\n #\n # Unless required by applicable law or agreed to in writing,\n # software distributed under the License is distributed on an\n # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n # KIND, either express or implied. See the License for the\n # specific language governing permissions a
nd limitations\n # under the License.\n #\n #\n #\n\n # Set everything to be logged to the console\n log4j.rootCategory=WARN, console\n log4j.appender.console=org.apache.log4j.ConsoleAppender\n log4j.appender.console.target=System.err\n log4j.appender.console.layout=org.apache.log4j.PatternLayout\n log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n # Settings to quiet third party logs that are too verbose\n log4j.logger.org.eclipse.jetty=WARN\n log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=WARN\n log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=WARN"
},
"hadoop-env": {
- "hdfs_user": "hdfs"
+ "hdfs_user": "hdfs",
+ "hdfs_tmp_dir": "/tmp"
},
"core-site": {
"fs.defaultFS": "hdfs://c6401.ambari.apache.org:8020"