Posted to commits@dlab.apache.org by my...@apache.org on 2020/01/13 13:40:03 UTC

[incubator-dlab] branch DLAB-1409 created (now 3f32a0f)

This is an automated email from the ASF dual-hosted git repository.

mykolabodnar pushed a change to branch DLAB-1409
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git.


      at 3f32a0f  [DLAB-1409] - Scala version is actual on Jupyter UI for DES/Jupyter

This branch includes the following new commits:

     new 3f32a0f  [DLAB-1409] - Scala version is actual on Jupyter UI for DES/Jupyter

The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email.  Revisions
listed as "add" were already present in the repository and have only
been added to this reference.




[incubator-dlab] 01/01: [DLAB-1409] - Scala version is actual on Jupyter UI for DES/Jupyter

Posted by my...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mykolabodnar pushed a commit to branch DLAB-1409
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git

commit 3f32a0fda357f3871af73a0955ce2715e978b697
Author: Mykola_Bodnar1 <bo...@gmail.com>
AuthorDate: Mon Jan 13 15:39:32 2020 +0200

    [DLAB-1409] - Scala version is actual on Jupyter UI for DES/Jupyter
---
 .../scripts/aws/jupyter_dataengine-service_create_configs.py      | 2 +-
 .../src/general/scripts/gcp/dataengine-service_jars_parser.py     | 6 ++++++
 .../scripts/gcp/jupyter_dataengine-service_create_configs.py      | 4 ++--
 .../scripts/gcp/jupyter_install_dataengine-service_kernels.py     | 8 ++++++--
 .../src/general/scripts/os/jupyter_dataengine_create_configs.py   | 2 +-
 5 files changed, 16 insertions(+), 6 deletions(-)
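
 In short, the patch stops shelling out to scala -e "println(scala.util.Properties.versionNumberString)"
 (which reports whatever standalone Scala happens to be installed on the node) and instead parses the
 spark-submit --version banner, so the Toree kernel.json ends up with the Scala version Spark itself was
 built with. A minimal sketch of that detection, assuming spark-submit is on PATH and stderr is merged
 because the banner is printed there (the helper name is illustrative, not part of the patch):

    import re
    import subprocess

    def detect_scala_version():
        # spark-submit prints its banner, including a line such as
        # "Using Scala version 2.11.12, ...", to stderr, so merge it into stdout.
        banner = subprocess.check_output("spark-submit --version 2>&1",
                                         shell=True).decode('UTF-8')
        match = re.search(r'Scala version (\d+\.\d+\.\d+)', banner)
        return match.group(1) if match else ''

 The patch itself uses grep -P with \K to the same effect; the regex above is just an equivalent
 without the GNU grep dependency.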

diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
index 9af6935..7103301 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
@@ -85,7 +85,7 @@ def r_kernel(args):
 
 def toree_kernel(args):
     spark_path = '/opt/' + args.emr_version + '/' + args.cluster_name + '/spark/'
-    scala_version = local('scala -e "println(scala.util.Properties.versionNumberString)"', capture=True)
+    scala_version = local('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture=True)
     if args.emr_version == 'emr-4.3.0' or args.emr_version == 'emr-4.6.0' or args.emr_version == 'emr-4.8.0':
         local('mkdir -p ' + kernels_dir + 'toree_' + args.cluster_name + '/')
         kernel_path = kernels_dir + "toree_" + args.cluster_name + "/kernel.json"
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_jars_parser.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_jars_parser.py
index 4fd5a4a..c240f6e 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_jars_parser.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_jars_parser.py
@@ -56,6 +56,11 @@ if __name__ == "__main__":
     spark_ver = subprocess.check_output("dpkg -l | grep spark-core | tr -s ' ' '-' | cut -f 4 -d '-'", shell=True).decode('UTF-8')
     with open('/tmp/spark_version', 'w') as outfile:
         outfile.write(spark_ver)
+    os.system('touch /tmp/scala_version')
+    scala_ver = subprocess.check_output("spark-submit --version 2>&1 | grep -o -P 'Scala version \K.{0,7}'",
+                                        shell=True).decode('UTF-8')
+    with open('/tmp/scala_version', 'w') as outfile:
+        outfile.write(scala_ver)
     os.system('touch /tmp/hadoop_version')
     hadoop_ver = subprocess.check_output("dpkg -l | grep hadoop | head -n 1 | tr -s ' ' '-' | cut -f 3 -d '-'", shell=True).decode('UTF-8')
     with open('/tmp/hadoop_version', 'w') as outfile:
@@ -79,6 +84,7 @@ if __name__ == "__main__":
     os.system('gsutil -m cp {0} gs://{1}/{2}/{3}/'.format(spark_def_path, args.bucket, args.user_name, args.cluster_name))
     os.system('gsutil -m cp /tmp/python_version gs://{0}/{1}/{2}/'.format(args.bucket, args.user_name, args.cluster_name))
     os.system('gsutil -m cp /tmp/spark_version gs://{0}/{1}/{2}/'.format(args.bucket, args.user_name, args.cluster_name))
+    os.system('gsutil -m cp /tmp/scala_version gs://{0}/{1}/{2}/'.format(args.bucket, args.user_name, args.cluster_name))
     os.system('gsutil -m cp /tmp/r_version gs://{0}/{1}/{2}/'.format(args.bucket, args.user_name, args.cluster_name))
     os.system('gsutil -m cp /tmp/hadoop_version gs://{0}/{1}/{2}/'.format(args.bucket, args.user_name, args.cluster_name))
     os.system('gsutil -m cp /tmp/spark.tar.gz gs://{0}/{1}/{2}/'.format(args.bucket, args.user_name, args.cluster_name))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_dataengine-service_create_configs.py
index 959f9b5..9bc8e37 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_dataengine-service_create_configs.py
@@ -50,6 +50,7 @@ parser.add_argument('--pip_mirror', type=str, default='')
 parser.add_argument('--application', type=str, default='')
 parser.add_argument('--r_version', type=str, default='')
 parser.add_argument('--r_enabled', type=str, default='')
+parser.add_argument('--scala_version', type=str, default='')
 args = parser.parse_args()
 
 dataproc_dir = '/opt/{}/jars/'.format(args.dataproc_version)
@@ -79,7 +80,6 @@ def r_kernel(args):
 
 def toree_kernel(args):
     spark_path = '/opt/{0}/{1}/spark/'.format(args.dataproc_version, args.cluster_name)
-    scala_version = local('scala -e "println(scala.util.Properties.versionNumberString)"', capture=True)
     local('mkdir -p {0}toree_{1}/'.format(kernels_dir, args.cluster_name))
     local('tar zxvf /tmp/toree_kernel.tar.gz -C {0}toree_{1}/'.format(kernels_dir, args.cluster_name))
     local('sudo mv {0}toree_{1}/toree-0.2.0-incubating/* {0}toree_{1}/'.format(kernels_dir, args.cluster_name))
@@ -93,7 +93,7 @@ def toree_kernel(args):
     text = text.replace('SPARK_PATH', spark_path)
     text = text.replace('OS_USER', args.os_user)
     text = text.replace('DATAENGINE-SERVICE_VERSION', args.dataproc_version)
-    text = text.replace('SCALA_VERSION', scala_version)
+    text = text.replace('SCALA_VERSION', args.scala_version)
     with open(kernel_path, 'w') as f:
         f.write(text)
     local('touch /tmp/kernel_var.json')
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_install_dataengine-service_kernels.py
index 710974c..cb17668 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_install_dataengine-service_kernels.py
@@ -70,6 +70,10 @@ def configure_notebook(args):
 
 
 if __name__ == "__main__":
+    GCPActions().get_from_bucket(args.bucket, '{0}/{1}/scala_version'.format(args.project_name, args.cluster_name),
+                                 '/tmp/scala_version')
+    with file('/tmp/scala_version') as f:
+        scala_version = str(f.read()).replace(',', '')
     env.hosts = "{}".format(args.notebook_ip)
     env.user = args.os_user
     env.key_filename = "{}".format(args.keyfile)
@@ -84,7 +88,7 @@ if __name__ == "__main__":
     r_enabled = os.environ['notebook_r_enabled']
     sudo('echo "[global]" > /etc/pip.conf; echo "proxy = $(cat /etc/profile | grep proxy | head -n1 | cut -f2 -d=)" >> /etc/pip.conf')
     sudo('echo "use_proxy=yes" > ~/.wgetrc; proxy=$(cat /etc/profile | grep proxy | head -n1 | cut -f2 -d=); echo "http_proxy=$proxy" >> ~/.wgetrc; echo "https_proxy=$proxy" >> ~/.wgetrc')
-    sudo('unset http_proxy https_proxy; export gcp_project_id="{0}"; export conf_resource="{1}"; /usr/bin/python /usr/local/bin/create_configs.py --bucket {2} --cluster_name {3} --dataproc_version {4} --spark_version {5} --hadoop_version {6} --region {7} --user_name {8} --os_user {9} --pip_mirror {10} --application {11} --r_version {12} --r_enabled {13}'
+    sudo('unset http_proxy https_proxy; export gcp_project_id="{0}"; export conf_resource="{1}"; /usr/bin/python /usr/local/bin/create_configs.py --bucket {2} --cluster_name {3} --dataproc_version {4} --spark_version {5} --hadoop_version {6} --region {7} --user_name {8} --os_user {9} --pip_mirror {10} --application {11} --r_version {12} --r_enabled {13} --scala_version {14}'
          .format(os.environ['gcp_project_id'], os.environ['conf_resource'], args.bucket, args.cluster_name,
                  args.dataproc_version, spark_version, hadoop_version, args.region, args.project_name, args.os_user,
-                 args.pip_mirror, args.application, r_version, r_enabled))
+                 args.pip_mirror, args.application, r_version, r_enabled, scala_version))
diff --git a/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py
index 5b4a51c..60a3246 100644
--- a/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py
@@ -79,7 +79,7 @@ def r_kernel(args):
 
 def toree_kernel(args):
     spark_path = '/opt/' + args.cluster_name + '/spark/'
-    scala_version = local('scala -e "println(scala.util.Properties.versionNumberString)"', capture=True)
+    scala_version = local('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture=True)
     local('mkdir -p ' + kernels_dir + 'toree_' + args.cluster_name + '/')
     local('tar zxvf /tmp/{}/toree_kernel.tar.gz -C '.format(args.cluster_name) + kernels_dir + 'toree_' + args.cluster_name + '/')
     local('sudo mv {0}toree_{1}/toree-0.2.0-incubating/* {0}toree_{1}/'.format(kernels_dir, args.cluster_name))
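
 On Dataproc the notebook host cannot run spark-submit for the remote cluster, so the detected version
 travels through the bucket: dataengine-service_jars_parser.py writes it to /tmp/scala_version and copies
 it to GCS, and jupyter_install_dataengine-service_kernels.py fetches it back and forwards it to
 create_configs.py via the new --scala_version argument. A rough sketch of both ends of that round trip,
 with the bucket layout taken from the patch and the function names purely illustrative:

    import os
    import subprocess

    def publish_scala_version(bucket, user_name, cluster_name):
        # Runs on the Dataproc master: detect the Scala version and push it to GCS.
        scala_ver = subprocess.check_output(
            "spark-submit --version 2>&1 | grep -o -P 'Scala version \\K.{0,7}'",
            shell=True).decode('UTF-8')
        with open('/tmp/scala_version', 'w') as outfile:
            outfile.write(scala_ver)
        os.system('gsutil -m cp /tmp/scala_version gs://{0}/{1}/{2}/'.format(
            bucket, user_name, cluster_name))

    def read_scala_version():
        # Runs on the notebook after the file has been fetched from the bucket;
        # strip the trailing comma the 7-character grep window can pick up
        # before passing the value on as --scala_version.
        with open('/tmp/scala_version') as f:
            return f.read().replace(',', '').strip()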


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@dlab.apache.org
For additional commands, e-mail: commits-help@dlab.apache.org