Posted to commits@ambari.apache.org by ol...@apache.org on 2018/07/18 15:06:09 UTC

[ambari] branch trunk updated: AMBARI-24298 - fix preupload for zeppelin dependencies (#1770)

This is an automated email from the ASF dual-hosted git repository.

oleewere pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/trunk by this push:
     new def026c  AMBARI-24298 - fix preupload for zeppelin dependencies (#1770)
def026c is described below

commit def026cb4dabba76273aea12a1a22a905ddf4053
Author: Gabor Boros <63...@users.noreply.github.com>
AuthorDate: Wed Jul 18 17:06:07 2018 +0200

    AMBARI-24298 - fix preupload for zeppelin dependencies (#1770)
    
    Change-Id: Ic85e6237563d10f53e311248fb458c13c2480790
---
 ambari-server/src/main/resources/scripts/Ambaripreupload.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ambari-server/src/main/resources/scripts/Ambaripreupload.py b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
index ee8fd00..1ee3c1c 100644
--- a/ambari-server/src/main/resources/scripts/Ambaripreupload.py
+++ b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
@@ -281,7 +281,7 @@ with Environment() as env:
   def copy_zeppelin_dependencies_to_hdfs(file_pattern):
     spark_deps_full_path = glob.glob(file_pattern)
     if spark_deps_full_path and os.path.exists(spark_deps_full_path[0]):
-      copy_tarballs_to_hdfs(spark_deps_full_path[0], hdfs_path_prefix+'/apps/zeppelin/', 'hadoop-mapreduce-historyserver', params.hdfs_user, 'zeppelin', 'zeppelin')
+      copy_tarballs_to_hdfs(spark_deps_full_path[0], hdfs_path_prefix+'/apps/zeppelin/', params.hdfs_user, 'zeppelin', 'zeppelin')
     else:
       Logger.info('zeppelin-spark-dependencies not found at %s.' % file_pattern)
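
For reference, below is a sketch of how copy_zeppelin_dependencies_to_hdfs reads after this patch, with the stale 'hadoop-mapreduce-historyserver' argument dropped from the copy_tarballs_to_hdfs call. It assumes copy_tarballs_to_hdfs, Logger, params, and hdfs_path_prefix are provided elsewhere in Ambaripreupload.py; only glob and os come from the standard library.

  # Sketch of the function as patched; copy_tarballs_to_hdfs, Logger, params
  # and hdfs_path_prefix are assumed to be defined by the surrounding script.
  import glob
  import os

  def copy_zeppelin_dependencies_to_hdfs(file_pattern):
    # Resolve the zeppelin-spark-dependencies jar on the local filesystem.
    spark_deps_full_path = glob.glob(file_pattern)
    if spark_deps_full_path and os.path.exists(spark_deps_full_path[0]):
      # After the fix, the HDFS user is passed directly after the target path,
      # followed by the owner and group for the uploaded file.
      copy_tarballs_to_hdfs(spark_deps_full_path[0], hdfs_path_prefix + '/apps/zeppelin/',
                            params.hdfs_user, 'zeppelin', 'zeppelin')
    else:
      Logger.info('zeppelin-spark-dependencies not found at %s.' % file_pattern)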