Posted to commits@datalab.apache.org by lf...@apache.org on 2021/08/19 12:46:14 UTC
[incubator-datalab] 01/01: [DATALAB-2396]: fixed download path for connector, DATALAB-2054 remade disk handling function
This is an automated email from the ASF dual-hosted git repository.
lfrolov pushed a commit to branch DATALAB-2396
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git
commit 4e6a326ee3f3077ea3150bda81a7d1cfc11c954c
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Thu Aug 19 15:45:17 2021 +0300
[DATALAB-2396]: fixed download path for connector, DATALAB-2054 remade disk handling function
---
.../src/general/lib/azure/actions_lib.py | 64 +++++++++++-----------
.../src/general/lib/os/fab.py | 2 +-
2 files changed, 34 insertions(+), 32 deletions(-)
diff --git a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
index f5924d2..66859ff 100644
--- a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
@@ -1082,7 +1082,7 @@ def ensure_local_jars(os_user, jars_dir):
datalab.fab.conn.sudo('wget https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-azure-datalake/{0}/hadoop-azure-datalake-{0}.jar \
-O {1}hadoop-azure-datalake-{0}.jar'.format('3.0.0', jars_dir))
if os.environ['application'] == 'tensor' or os.environ['application'] == 'deeplearning':
- datalab.fab.conn.sudo('wget https://dl.bintray.com/spark-packages/maven/tapanalyticstoolkit/spark-tensorflow-connector/{0}/spark-tensorflow-connector-{0}.jar \
+ datalab.fab.conn.sudo('wget https://repos.spark-packages.org/tapanalyticstoolkit/spark-tensorflow-connector/{0}/spark-tensorflow-connector-{0}.jar \
-O {1}spark-tensorflow-connector-{0}.jar'.format('1.0.0-s_2.11', jars_dir))
datalab.fab.conn.sudo('touch /home/{}/.ensure_dir/local_jars_ensured'.format(os_user))
except Exception as err:
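Context for the hunk above: dl.bintray.com was retired in 2021, so the spark-tensorflow-connector jar now has to come from repos.spark-packages.org. A minimal standalone sketch of that download step, assuming plain subprocess in place of the commit's datalab.fab.conn.sudo; fetch_tf_connector is a hypothetical helper, and jars_dir is expected to end in '/' because the destination path is built by bare concatenation, exactly as in the diff:

    # Sketch only: fetch the connector jar from the new spark-packages host.
    import subprocess

    def fetch_tf_connector(jars_dir, version='1.0.0-s_2.11'):
        # Hypothetical helper; mirrors the wget the commit runs via sudo.
        url = ('https://repos.spark-packages.org/tapanalyticstoolkit/'
               'spark-tensorflow-connector/{0}/'
               'spark-tensorflow-connector-{0}.jar').format(version)
        dest = '{0}spark-tensorflow-connector-{1}.jar'.format(jars_dir, version)
        subprocess.run(['wget', url, '-O', dest], check=True)  # raise if wget fails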
@@ -1248,36 +1248,38 @@ def prepare_disk(os_user):
allow = False
counter = 0
remount_azure_disk()
- disk_name = datalab.fab.conn.sudo("lsblk | grep disk | awk '{print $1}' | sort | tail -n 1").stdout.replace('\n','')
- datalab.fab.conn.sudo('umount -l /dev/{}1'.format(disk_name), warn=True)
- while not allow:
- if counter > 4:
- print("Unable to prepare disk")
- sys.exit(1)
- else:
- out = datalab.fab.conn.sudo('''bash -c 'echo -e "o\nn\np\n1\n\n\nw" | fdisk /dev/{} 2>&1' '''.format(
- disk_name)).stdout
- if 'Syncing disks' in out:
- allow = True
- elif 'The kernel still uses the old table.' in out:
- if datalab.fab.conn.sudo('partprobe').stdout:
- datalab.fab.conn.sudo('reboot', warn=True)
- allow = True
- else:
- counter += 1
- time.sleep(5)
- datalab.fab.conn.sudo('umount -l /dev/{}1'.format(disk_name), warn=True)
- try:
- datalab.fab.conn.sudo('mkfs.ext4 -F /dev/{}1'.format(disk_name))
- except:
- out = datalab.fab.conn.sudo('mount -l | grep /dev/{}1'.format(disk_name)).stdout
- if 'type ext4' in out:
- pass
- else:
- sys.exit(1)
- datalab.fab.conn.sudo('mount /dev/{}1 /opt/'.format(disk_name))
- datalab.fab.conn.sudo(''' bash -c "echo '/dev/{}1 /opt/ ext4 errors=remount-ro 0 1' >> /etc/fstab" '''.format(
- disk_name))
+ disk_names = datalab.fab.conn.sudo("lsblk | grep disk | awk '{print $1}' | sort").stdout.split('\n')
+ for disk in disk_names:
+ if disk != '' and disk not in datalab.fab.conn.sudo('lsblk | grep part').stdout:
+ while not allow:
+ if counter > 4:
+ print("Unable to prepare disk")
+ sys.exit(1)
+ else:
+ out = datalab.fab.conn.sudo(
+ '''bash -c 'echo -e "o\nn\np\n1\n\n\nw" | fdisk /dev/{} 2>&1' '''.format(
+ disk)).stdout
+ if 'Syncing disks' in out:
+ allow = True
+ elif 'The kernel still uses the old table.' in out:
+ if datalab.fab.conn.sudo('partprobe').stdout:
+ datalab.fab.conn.sudo('reboot', warn=True)
+ allow = True
+ else:
+ counter += 1
+ time.sleep(5)
+ datalab.fab.conn.sudo('umount -l /dev/{}1'.format(disk), warn=True)
+ try:
+ datalab.fab.conn.sudo('mkfs.ext4 -F /dev/{}1'.format(disk))
+ except:
+ out = datalab.fab.conn.sudo('mount -l | grep /dev/{}1'.format(disk)).stdout
+ if 'type ext4' in out:
+ pass
+ else:
+ sys.exit(1)
+ datalab.fab.conn.sudo('mount /dev/{}1 /opt/'.format(disk))
+ datalab.fab.conn.sudo(''' bash -c "echo '/dev/{}1 /opt/ ext4 errors=remount-ro 0 1' >> /etc/fstab" '''.format(disk))
+
datalab.fab.conn.sudo('touch /home/' + os_user + '/.ensure_dir/disk_ensured')
except Exception as err:
traceback.print_exc()
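What the rewrite above changes: the old code picked only the last disk lsblk reported (sort | tail -n 1) and prepared that single device; the new code walks every disk and prepares each one that does not yet appear among the partitions. A condensed sketch of the new per-disk flow, assuming a run callable that behaves like datalab.fab.conn.sudo (accepts warn=, returns a result with .stdout); unlike the committed hunk, which initializes allow and counter once before the loop, the retry budget here restarts for every disk:

    import sys
    import time

    def prepare_unpartitioned_disks(run):
        # All block devices lsblk classifies as whole disks, sorted by name.
        disks = run("lsblk | grep disk | awk '{print $1}' | sort").stdout.split('\n')
        partitioned = run('lsblk | grep part').stdout
        for disk in disks:
            if not disk or disk in partitioned:
                continue  # skip blank lines and disks that already carry a partition
            for attempt in range(5):
                # Script fdisk: new DOS label (o), one primary partition
                # spanning the disk (n, p, 1, defaults), then write (w).
                out = run('''bash -c 'echo -e "o\\nn\\np\\n1\\n\\n\\nw" | fdisk /dev/{} 2>&1' '''.format(disk)).stdout
                if 'Syncing disks' in out:
                    break
                if 'The kernel still uses the old table.' in out and run('partprobe').stdout:
                    run('reboot', warn=True)
                    break
                time.sleep(5)
            else:
                sys.exit(1)  # five failed fdisk attempts: give up, as the commit does
            run('umount -l /dev/{}1'.format(disk), warn=True)
            run('mkfs.ext4 -F /dev/{}1'.format(disk))
            run('mount /dev/{}1 /opt/'.format(disk))
            run(''' bash -c "echo '/dev/{}1 /opt/ ext4 errors=remount-ro 0 1' >> /etc/fstab" '''.format(disk))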
diff --git a/infrastructure-provisioning/src/general/lib/os/fab.py b/infrastructure-provisioning/src/general/lib/os/fab.py
index f10a0a1..ff1ea4d 100644
--- a/infrastructure-provisioning/src/general/lib/os/fab.py
+++ b/infrastructure-provisioning/src/general/lib/os/fab.py
@@ -211,7 +211,7 @@ def id_generator(size=10, chars=string.digits + string.ascii_letters):
def ensure_dataengine_tensorflow_jars(jars_dir):
- subprocess.run('wget https://dl.bintray.com/spark-packages/maven/tapanalyticstoolkit/spark-tensorflow-connector/1.0.0-s_2.11/spark-tensorflow-connector-1.0.0-s_2.11.jar \
+ subprocess.run('wget https://repos.spark-packages.org/tapanalyticstoolkit/spark-tensorflow-connector/1.0.0-s_2.11/spark-tensorflow-connector-1.0.0-s_2.11.jar \
-O {}spark-tensorflow-connector-1.0.0-s_2.11.jar'.format(jars_dir), shell=True, check=True)
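The fab.py hunk above applies the same host swap on the dataengine path, this time through subprocess.run. A quick spot check, purely hypothetical and not part of the commit, that the relocated URL still serves the jar:

    # Hypothetical spot check: confirm the new spark-packages host answers.
    import urllib.request

    url = ('https://repos.spark-packages.org/tapanalyticstoolkit/'
           'spark-tensorflow-connector/1.0.0-s_2.11/'
           'spark-tensorflow-connector-1.0.0-s_2.11.jar')
    req = urllib.request.Request(url, method='HEAD')
    with urllib.request.urlopen(req) as resp:
        print(resp.status)  # expect 200; dl.bintray.com has been retired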