Posted to commits@datalab.apache.org by mh...@apache.org on 2021/11/26 15:28:31 UTC

[incubator-datalab] 01/01: 2605

This is an automated email from the ASF dual-hosted git repository.

mhladun pushed a commit to branch DATALAB-2605
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit 0fc0fa280ca2607e71f5b2e362c56391181e7728
Author: Marian_Hladun <ma...@gmail.com>
AuthorDate: Fri Nov 26 17:28:14 2021 +0200

    2605
---
 infrastructure-provisioning/src/general/lib/aws/actions_lib.py | 1 +
 infrastructure-provisioning/src/general/lib/os/fab.py          | 7 -------
 2 files changed, 1 insertion(+), 7 deletions(-)

diff --git a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
index 567c3b6..dca9c9b 100644
--- a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
@@ -1455,6 +1455,7 @@ def install_emr_spark(args):
             print("The checksum of spark.tar.gz is mismatched. It could be caused by aws network issue.")
             sys.exit(1)
     subprocess.run('sudo tar -zhxvf /tmp/spark.tar.gz -C /opt/' + args.emr_version + '/' + args.cluster_name + '/', shell=True, check=True)
+    subprocess.run('sudo cp -R /opt/spark/R/lib/SparkR /opt/' + args.emr_version + '/' + args.cluster_name + '/spark/R/lib/', shell=True, check=True)
 
 
 def jars(args, emr_dir):
diff --git a/infrastructure-provisioning/src/general/lib/os/fab.py b/infrastructure-provisioning/src/general/lib/os/fab.py
index 22215e6..7cc2a2d 100644
--- a/infrastructure-provisioning/src/general/lib/os/fab.py
+++ b/infrastructure-provisioning/src/general/lib/os/fab.py
@@ -509,13 +509,6 @@ def ensure_dataengine_tensorflow_jars(jars_dir):
          -O {}spark-tensorflow-connector-1.0.0-s_2.11.jar'.format(jars_dir), shell=True, check=True)
 
 
-def prepare(dataengine_service_dir, yarn_dir):
-    subprocess.run('mkdir -p ' + dataengine_service_dir, shell=True, check=True)
-    subprocess.run('mkdir -p ' + yarn_dir, shell=True, check=True)
-    subprocess.run('sudo mkdir -p /opt/python/', shell=True, check=True)
-    result = os.path.exists(dataengine_service_dir + 'usr/')
-    return result
-
 def install_r_pkg(requisites):
     status = list()
     error_parser = "ERROR:|error:|Cannot|failed|Please run|requires|Error|Skipping|couldn't find"
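
For context: the one-line addition in actions_lib.py mirrors the SparkR R package from the node-local Spark install into the per-cluster Spark directory immediately after the tarball is extracted, and the prepare() helper is removed from fab.py. A minimal standalone sketch of the new copy step follows; the helper name copy_sparkr_libs and the example argument values are hypothetical, while the paths and the cp invocation are taken verbatim from the diff:

    import subprocess

    def copy_sparkr_libs(emr_version, cluster_name):
        # Mirror the node-local SparkR package into the per-cluster
        # Spark R library directory created by the tar extraction step.
        target = '/opt/' + emr_version + '/' + cluster_name + '/spark/R/lib/'
        subprocess.run('sudo cp -R /opt/spark/R/lib/SparkR ' + target,
                       shell=True, check=True)

    # Hypothetical usage:
    # copy_sparkr_libs('emr-5.30.0', 'my-cluster')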

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org