Posted to commits@datalab.apache.org by lf...@apache.org on 2020/11/25 10:59:28 UTC

[incubator-datalab] branch DATALAB-2162 created (now bce0d47)

This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a change to branch DATALAB-2162
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git.


      at bce0d47  [DATALAB-2162]: changed dlab to datalab

This branch includes the following new commits:

     new e5fe2be  [DATALAB-2162]: merged odahu_integration into develop clone, skipped changes in java files from DATALAB-1650
     new bce0d47  [DATALAB-2162]: changed dlab to datalab

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.





[incubator-datalab] 01/02: [DATALAB-2162]: merged odahu_integration into develop clone, skipped changes in java files from DATALAB-1650

Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2162
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit e5fe2be83fffb5b2da6ecd8e9de2ea86af2a0a82
Merge: dd5d9ad bbd54b1
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Wed Nov 25 12:37:26 2020 +0200

    [DATALAB-2162]: merged odahu_integration into develop clone, skipped changes in java files from DATALAB-1650

 .../src/general/lib/aws/actions_lib.py             |  15 ++
 .../src/general/scripts/aws/odahu_deploy.py        | 293 +++++++++++++++++++++
 .../src/general/scripts/aws/odahu_prepare.py       | 158 +++++++++++
 .../src/ssn/scripts/configure_docker.py            |  30 ++-
 .../epam/datalab/backendapi/dao/OdahuDAOImpl.java  | 166 ++++++++++++
 .../service/impl/OdahuServiceImplTest.java         | 236 +++++++++++++++++
 6 files changed, 897 insertions(+), 1 deletion(-)

diff --cc infrastructure-provisioning/src/ssn/scripts/configure_docker.py
index d16f5cd,e2f617c..897f276
--- a/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
@@@ -78,8 -81,33 +78,33 @@@ def add_china_repository(datalab_path)
          sudo('sed -i "/pip install/s/jupyter/ipython==5.0.0 jupyter==1.0.0/g" Dockerfile')
          sudo('sed -i "22i COPY general/files/os/debian/sources.list /etc/apt/sources.list" Dockerfile')
  
+ def login_in_gcr(os_user, gcr_creds, odahu_image, dlab_path, cloud_provider):
+     if os.environ['conf_cloud_provider'] != 'gcp':
+         try:
+             sudo('echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] https://packages.cloud.google.com/apt '
+                   'cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list')
+             sudo('apt-get -y install apt-transport-https ca-certificates gnupg')
+             sudo('curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /usr/share/keyrings/cloud.google.gpg add -')
+             sudo('apt-get update')
+             sudo('apt-get -y install google-cloud-sdk')
+         except Exception as err:
+             traceback.print_exc()
+             print('Failed to install gcloud: ', str(err))
+             sys.exit(1)
+     try:
+         with open('/tmp/config', 'w') as f:
+             f.write(base64.b64decode(gcr_creds))
+         local('scp -i {} /tmp/config {}:/tmp/config'.format(args.keyfile, env.host_string, os_user))
+         sudo('mkdir /home/{}/.docker'.format(os_user))
+         sudo('cp /tmp/config /home/{}/.docker/config.json'.format(os_user))
+         sudo('sed -i "s|ODAHU_IMAGE|{}|" {}sources/infrastructure-provisioning/src/general/files/{}/odahu_Dockerfile'
+              .format(odahu_image, dlab_path, cloud_provider))
+     except Exception as err:
+         traceback.print_exc()
+         print('Failed to prepare odahu image: ', str(err))
+         sys.exit(1)
  
 -def build_docker_images(image_list, region, dlab_path):
 +def build_docker_images(image_list, region, datalab_path):
      try:
          if os.environ['conf_cloud_provider'] == 'azure':
              local('scp -i {} /root/azure_auth.json {}:{}sources/infrastructure-provisioning/src/base/'
@@@ -174,12 -201,15 +199,15 @@@ if __name__ == "__main__"
      download_toree()
  
      print("Installing docker daemon")
 -    if not ensure_docker_daemon(args.dlab_path, args.os_user, args.region):
 +    if not ensure_docker_daemon(args.datalab_path, args.os_user, args.region):
          sys.exit(1)
  
-     print("Building DataLab images")
+     print("Login in Google Container Registry")
+     login_in_gcr(args.os_user, args.gcr_creds, args.odahu_image, args.dlab_path, args.cloud_provider)
+ 
+     print("Building dlab images")
      count = 0
 -    while not build_docker_images(deeper_config, args.region, args.dlab_path) and count < 5:
 +    while not build_docker_images(deeper_config, args.region, args.datalab_path) and count < 5:
          count += 1
          time.sleep(5)
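The login_in_gcr step added in this hunk installs the google-cloud-sdk where it is missing and decodes the base64-encoded GCR credentials into the Docker client config before any images are built. A minimal standalone sketch of the credential-handling idea follows; the function name and target paths are illustrative assumptions, not code from this patch:

    # Sketch only: turn base64-encoded registry credentials into a Docker
    # client config file. Names and paths are assumptions, not the patch itself.
    import base64
    import os

    def write_docker_config(gcr_creds_b64, os_user):
        docker_dir = '/home/{}/.docker'.format(os_user)
        if not os.path.exists(docker_dir):
            os.makedirs(docker_dir)
        config_path = os.path.join(docker_dir, 'config.json')
        # b64decode returns bytes, so write in binary mode
        with open(config_path, 'wb') as f:
            f.write(base64.b64decode(gcr_creds_b64))
        return config_path

The committed function performs the equivalent steps through Fabric's local/sudo calls (scp of the decoded config, then mkdir and cp on the target host).
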
  
diff --cc services/self-service/src/main/java/com/epam/datalab/backendapi/dao/OdahuDAOImpl.java
index 0000000,0000000..9ba3297
new file mode 100644
--- /dev/null
+++ b/services/self-service/src/main/java/com/epam/datalab/backendapi/dao/OdahuDAOImpl.java
@@@ -1,0 -1,0 +1,166 @@@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ *
++ *   http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied.  See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++
++package com.epam.datalab.backendapi.dao;
++
++import com.epam.datalab.backendapi.domain.OdahuDTO;
++import com.epam.datalab.backendapi.domain.OdahuFieldsDTO;
++import com.epam.datalab.backendapi.domain.ProjectDTO;
++import com.epam.datalab.dto.ResourceURL;
++import com.epam.datalab.dto.UserInstanceStatus;
++import com.epam.datalab.dto.base.odahu.OdahuResult;
++import com.epam.datalab.exceptions.DatalabException;
++import com.fasterxml.jackson.core.type.TypeReference;
++import com.mongodb.BasicDBObject;
++import com.mongodb.client.result.UpdateResult;
++import org.bson.Document;
++import org.bson.conversions.Bson;
++
++import java.util.ArrayList;
++import java.util.LinkedHashMap;
++import java.util.List;
++import java.util.Map;
++import java.util.Optional;
++import java.util.stream.Collectors;
++
++import static com.mongodb.client.model.Filters.and;
++import static com.mongodb.client.model.Filters.eq;
++import static com.mongodb.client.model.Projections.elemMatch;
++import static com.mongodb.client.model.Projections.excludeId;
++import static com.mongodb.client.model.Projections.fields;
++import static com.mongodb.client.model.Projections.include;
++import static com.mongodb.client.model.Updates.push;
++import static java.util.stream.Collectors.toList;
++
++public class OdahuDAOImpl extends BaseDAO implements OdahuDAO {
++
++    private static final String PROJECTS_COLLECTION = "Projects";
++    private static final String ENDPOINTS = "endpoints";
++    private static final String ODAHU_FIELD = "odahu";
++    private static final String NAME_FIELD = "name";
++    private static final String ENDPOINT_FIELD = "endpoint";
++    private static final String PROJECT_FIELD = "project";
++    private static final String STATUS_FIELD = "status";
++    private static final String GRAFANA_ADMIN_FIELD = "grafana_admin";
++    private static final String GRAFANA_PASSWORD_FIELD = "grafana_pass";
++    private static final String OAUTH_COOKIE_SECRET_FIELD = "oauth_cookie_secret";
++    private static final String DECRYPT_TOKEN_FIELD = "odahuflow_connection_decrypt_token";
++    private static final String URLS_FIELD = "urls";
++    private static final String COMPUTATIONAL_URL_DESC = "description";
++    private static final String COMPUTATIONAL_URL_URL = "url";
++
++    @Override
++    public Optional<OdahuDTO> getByProjectEndpoint(String project, String endpoint) {
++        Optional<ProjectDTO> projectDTO = findOne(PROJECTS_COLLECTION, odahuProjectEndpointCondition(project, endpoint),
++                fields(include(ODAHU_FIELD), excludeId()),
++                ProjectDTO.class);
++
++        return projectDTO.flatMap(p -> p.getOdahu().stream()
++                .filter(odahu -> project.equals(odahu.getProject()) && endpoint.equals(odahu.getEndpoint()))
++                .findAny());
++    }
++
++    @Override
++    public List<OdahuDTO> findOdahuClusters(String project, String endpoint) {
++        Optional<ProjectDTO> projectDTO = findOne(PROJECTS_COLLECTION, odahuProjectEndpointCondition(project, endpoint),
++                fields(include(ODAHU_FIELD), excludeId()),
++                ProjectDTO.class);
++
++        return projectDTO.map(p -> p.getOdahu().stream()
++                .filter(odahu -> project.equals(odahu.getProject()) && endpoint.equals(odahu.getEndpoint()))
++                .collect(Collectors.toList()))
++                .orElseThrow(() -> new DatalabException("Unable to find the odahu clusters in the " + project));
++    }
++
++    @Override
++    public OdahuFieldsDTO getFields(String name, String project, String endpoint) {
++        Document odahuDocument = findOne(PROJECTS_COLLECTION, odahuProjectEndpointCondition(name, project, endpoint),
++                fields(include(ODAHU_FIELD), excludeId()))
++                .orElseThrow(() -> new DatalabException(project.toString() + " does not contain odahu " + name.toString() + " cluster"));
++
++        List<OdahuFieldsDTO> list = convertFromDocument(odahuDocument.get(ODAHU_FIELD, ArrayList.class), new TypeReference<List<OdahuFieldsDTO>>() {});
++        return list.stream()
++                .filter(odahuFieldsDTO -> name.equals(odahuFieldsDTO.getName()))
++                .findAny()
++                .orElseThrow(() -> new DatalabException("Unable to find the " + name + " cluster fields"));
++    }
++
++    @Override
++    public List<OdahuDTO> findOdahuClusters() {
++        List<ProjectDTO> projectDTOS = find(PROJECTS_COLLECTION, ProjectDTO.class);
++        return projectDTOS.stream()
++                .map(ProjectDTO::getOdahu)
++                .flatMap(List::stream)
++                .collect(toList());
++    }
++
++    @Override
++    public boolean create(OdahuDTO odahuDTO) {
++        UpdateResult updateResult = updateOne(PROJECTS_COLLECTION, projectEndpointCondition(odahuDTO.getProject(),
++                odahuDTO.getEndpoint()),
++                push(ODAHU_FIELD, convertToBson(odahuDTO)));
++        return updateResult.getModifiedCount() > 0;
++    }
++
++    @Override
++    public void updateStatus(String name, String project, String endpoint, UserInstanceStatus status) {
++        BasicDBObject dbObject = new BasicDBObject();
++        dbObject.put(ODAHU_FIELD + ".$." + STATUS_FIELD, status.name());
++        updateOne(PROJECTS_COLLECTION, and(elemMatch(ODAHU_FIELD, eq(NAME_FIELD, name)),
++                odahuProjectEndpointCondition(project, endpoint)), new Document(SET, dbObject));
++    }
++
++    @Override
++    public void updateStatusAndUrls(OdahuResult result, UserInstanceStatus status) {
++        BasicDBObject dbObject = new BasicDBObject();
++        dbObject.put(ODAHU_FIELD + ".$." + STATUS_FIELD, status.name());
++        dbObject.put(ODAHU_FIELD + ".$." + URLS_FIELD, getResourceUrlData(result.getResourceUrls()));
++        dbObject.put(ODAHU_FIELD + ".$." + GRAFANA_ADMIN_FIELD, result.getGrafanaAdmin());
++        dbObject.put(ODAHU_FIELD + ".$." + GRAFANA_PASSWORD_FIELD, result.getGrafanaPassword());
++        dbObject.put(ODAHU_FIELD + ".$." + OAUTH_COOKIE_SECRET_FIELD, result.getOauthCookieSecret());
++        dbObject.put(ODAHU_FIELD + ".$." + DECRYPT_TOKEN_FIELD, result.getDecryptToken());
++        updateOne(PROJECTS_COLLECTION, odahuProjectEndpointCondition(result.getName(), result.getProjectName(), result.getEndpointName()),
++                new Document(SET, dbObject));
++    }
++
++    private Bson odahuProjectEndpointCondition(String name, String projectName, String endpointName) {
++        return and(elemMatch(ODAHU_FIELD, eq(NAME_FIELD, name)), odahuProjectEndpointCondition(projectName, endpointName));
++    }
++
++    private Bson odahuProjectEndpointCondition(String projectName, String endpointName) {
++        return elemMatch(ODAHU_FIELD, and(eq(ENDPOINT_FIELD, endpointName), eq(PROJECT_FIELD, projectName)));
++    }
++
++    private Bson projectEndpointCondition(String projectName, String endpointName) {
++        return and(eq(NAME_FIELD, projectName), and(elemMatch(ENDPOINTS, eq(NAME_FIELD, endpointName))));
++    }
++
++    private List<Map<String, String>> getResourceUrlData(List<ResourceURL> urls) {
++        return urls.stream()
++                .map(this::toUrlDocument)
++                .collect(toList());
++    }
++
++    private LinkedHashMap<String, String> toUrlDocument(ResourceURL url) {
++        LinkedHashMap<String, String> map = new LinkedHashMap<>();
++        map.put(COMPUTATIONAL_URL_URL, url.getUrl());
++        map.put(COMPUTATIONAL_URL_DESC, url.getDescription());
++        return map;
++    }
++}
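
For orientation on the new DAO: updateStatus filters the Projects collection with two $elemMatch conditions on the odahu array (one on the cluster name, one on the project/endpoint pair) and then uses the positional $ operator to set the matched element's status. A rough pymongo sketch of that query shape, with the database wiring and sample values as assumptions rather than part of the commit:

    # Illustrative pymongo equivalent of OdahuDAOImpl.updateStatus; collection
    # and field names mirror the Java constants, everything else is assumed.
    from pymongo import MongoClient

    def update_odahu_status(db, name, project, endpoint, status):
        db['Projects'].update_one(
            {'$and': [
                {'odahu': {'$elemMatch': {'name': name}}},
                {'odahu': {'$elemMatch': {'endpoint': endpoint, 'project': project}}},
            ]},
            # the positional $ updates the first array element matched by the filter
            {'$set': {'odahu.$.status': status}},
        )

    # Assumed wiring for the sketch:
    # db = MongoClient('mongodb://localhost:27017')['datalab']
    # update_odahu_status(db, 'odahu-1', 'project-a', 'endpoint-a', 'RUNNING')
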
diff --cc services/self-service/src/test/java/com/epam/datalab/backendapi/service/impl/OdahuServiceImplTest.java
index 0000000,1d0f30f..1d0f30f
mode 000000,100644..100644
--- a/services/self-service/src/test/java/com/epam/datalab/backendapi/service/impl/OdahuServiceImplTest.java
+++ b/services/self-service/src/test/java/com/epam/datalab/backendapi/service/impl/OdahuServiceImplTest.java




[incubator-datalab] 02/02: [DATALAB-2162]: changed dlab to datalab

Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2162
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit bce0d476867528d07de5bacada568b99410ee0ca
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Wed Nov 25 12:59:02 2020 +0200

    [DATALAB-2162]: changed dlab to datalab
---
 .../src/general/scripts/aws/odahu_deploy.py                       | 6 +++---
 .../src/general/scripts/aws/odahu_prepare.py                      | 6 +++---
 infrastructure-provisioning/src/ssn/scripts/configure_docker.py   | 8 ++++----
 3 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/infrastructure-provisioning/src/general/scripts/aws/odahu_deploy.py b/infrastructure-provisioning/src/general/scripts/aws/odahu_deploy.py
index 1324b4b..ed3ce25 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/odahu_deploy.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/odahu_deploy.py
@@ -24,9 +24,9 @@
 import logging
 import json
 import sys
-from dlab.fab import *
-from dlab.meta_lib import *
-from dlab.actions_lib import *
+from datalab.fab import *
+from datalab.meta_lib import *
+from datalab.actions_lib import *
 import os
 import base64
 
diff --git a/infrastructure-provisioning/src/general/scripts/aws/odahu_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/odahu_prepare.py
index 72e38d4..15dbfb9 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/odahu_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/odahu_prepare.py
@@ -25,9 +25,9 @@ import logging
 import json
 import sys
 import requests
-from dlab.fab import *
-from dlab.meta_lib import *
-from dlab.actions_lib import *
+from datalab.fab import *
+from datalab.meta_lib import *
+from datalab.actions_lib import *
 import os
 
 if __name__ == "__main__":
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_docker.py b/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
index 897f276..9cd867d 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
@@ -78,7 +78,7 @@ def add_china_repository(datalab_path):
         sudo('sed -i "/pip install/s/jupyter/ipython==5.0.0 jupyter==1.0.0/g" Dockerfile')
         sudo('sed -i "22i COPY general/files/os/debian/sources.list /etc/apt/sources.list" Dockerfile')
 
-def login_in_gcr(os_user, gcr_creds, odahu_image, dlab_path, cloud_provider):
+def login_in_gcr(os_user, gcr_creds, odahu_image, datalab_path, cloud_provider):
     if os.environ['conf_cloud_provider'] != 'gcp':
         try:
             sudo('echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] https://packages.cloud.google.com/apt '
@@ -98,7 +98,7 @@ def login_in_gcr(os_user, gcr_creds, odahu_image, dlab_path, cloud_provider):
         sudo('mkdir /home/{}/.docker'.format(os_user))
         sudo('cp /tmp/config /home/{}/.docker/config.json'.format(os_user))
         sudo('sed -i "s|ODAHU_IMAGE|{}|" {}sources/infrastructure-provisioning/src/general/files/{}/odahu_Dockerfile'
-             .format(odahu_image, dlab_path, cloud_provider))
+             .format(odahu_image, datalab_path, cloud_provider))
     except Exception as err:
         traceback.print_exc()
         print('Failed to prepare odahu image: ', str(err))
@@ -203,9 +203,9 @@ if __name__ == "__main__":
         sys.exit(1)
 
     print("Login in Google Container Registry")
-    login_in_gcr(args.os_user, args.gcr_creds, args.odahu_image, args.dlab_path, args.cloud_provider)
+    login_in_gcr(args.os_user, args.gcr_creds, args.odahu_image, args.datalab_path, args.cloud_provider)
 
-    print("Building dlab images")
+    print("Building Datalab images")
     count = 0
     while not build_docker_images(deeper_config, args.region, args.datalab_path) and count < 5:
         count += 1
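
The tail of this hunk keeps the existing bounded-retry pattern around build_docker_images: the build is re-invoked, with a short pause between attempts, until it succeeds or the attempt counter runs out. A self-contained sketch of that pattern, with retry() and build_images() as illustrative stand-ins for the real calls:

    # Sketch of the bounded-retry pattern used around build_docker_images;
    # retry() and build_images() are stand-ins, not code from the script.
    import time

    def retry(action, attempts=5, delay=5):
        for _ in range(attempts):
            if action():
                return True
            time.sleep(delay)
        return False

    def build_images():
        # stand-in for build_docker_images(deeper_config, args.region, args.datalab_path)
        return True

    if not retry(build_images):
        raise SystemExit('image build did not succeed after retries')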


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org