Posted to commits@datalab.apache.org by ol...@apache.org on 2022/03/25 13:38:18 UTC

[incubator-datalab] branch gcp-jupyter-highgpu-template-1 updated: removed conda template

This is an automated email from the ASF dual-hosted git repository.

olehmykolaishyn pushed a commit to branch gcp-jupyter-highgpu-template-1
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git


The following commit(s) were added to refs/heads/gcp-jupyter-highgpu-template-1 by this push:
     new d519627  removed conda template
d519627 is described below

commit d51962746f08b3495925667bd05a586405ca96f0
Author: oleh_mykolaishyn <ow...@gmail.com>
AuthorDate: Fri Mar 25 15:38:04 2022 +0200

    removed conda template
---
 .../edge/templates/locations/jupyter-conda.conf    |  29 --
 .../src/general/conf/datalab.ini                   |   2 +-
 .../src/general/files/gcp/jupyter-conda_Dockerfile |  54 ----
 .../files/gcp/jupyter-conda_description.json       |  18 --
 .../general/scripts/gcp/jupyter-conda_configure.py | 322 ---------------------
 .../src/general/scripts/gcp/ssn_configure.py       |   1 -
 .../src/jupyter-conda/fabfile.py                   | 255 ----------------
 .../scripts/configure_jupyter-conda_node.py        | 161 -----------
 .../project/templates/locations/jupyter-conda.conf |  29 --
 .../src/ssn/files/gcp/mongo_roles.json             |  10 -
 .../src/ssn/scripts/docker_build.py                |   1 -
 .../core/commands/CommandExecutorMock.java         |   1 -
 .../backendapi/domain/NotebookTemplate.java        |   1 -
 .../service/impl/LibraryServiceImpl.java           |   2 +-
 .../epam/datalab/backendapi/util/BillingUtils.java |   2 +-
 .../src/main/resources/mongo/gcp/mongo_roles.json  |  12 -
 .../create-environment.component.ts                |   4 -
 .../resources-grid/resources-grid.component.html   |   1 -
 18 files changed, 3 insertions(+), 902 deletions(-)

diff --git a/infrastructure-provisioning/src/edge/templates/locations/jupyter-conda.conf b/infrastructure-provisioning/src/edge/templates/locations/jupyter-conda.conf
deleted file mode 100644
index 56f98ed..0000000
--- a/infrastructure-provisioning/src/edge/templates/locations/jupyter-conda.conf
+++ /dev/null
@@ -1,29 +0,0 @@
-# *****************************************************************************
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# ******************************************************************************
-location ~* /{{ NAME }}/.* {
-    proxy_pass http://{{ IP }}:8888;
-    proxy_set_header Host $http_host;
-    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-    proxy_set_header X-Real-IP $remote_addr;
-    proxy_http_version 1.1;
-    proxy_set_header Upgrade $http_upgrade;
-    proxy_set_header Connection "upgrade";
-}
diff --git a/infrastructure-provisioning/src/general/conf/datalab.ini b/infrastructure-provisioning/src/general/conf/datalab.ini
index 23eb030..6d618db 100644
--- a/infrastructure-provisioning/src/general/conf/datalab.ini
+++ b/infrastructure-provisioning/src/general/conf/datalab.ini
@@ -393,7 +393,7 @@ slave_instance_spot_pct_price = 70
 ### Count of slave nodes for Data Engine
 # instance_count =
 ### Type of notebooks for creating Data Engine from notebook images
-image_notebooks = jupyter,jupyter-gpu,jupyter-conda,jupyterlab,rstudio,zeppelin,tensor,tensor-rstudio,deeplearning
+image_notebooks = jupyter,jupyter-gpu,jupyterlab,rstudio,zeppelin,tensor,tensor-rstudio,deeplearning
 ### Persent of RAM allocated for an operating system
 os_memory = 75
 ### Explicit allocation RAM for an operating system
diff --git a/infrastructure-provisioning/src/general/files/gcp/jupyter-conda_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/jupyter-conda_Dockerfile
deleted file mode 100644
index 610996c..0000000
--- a/infrastructure-provisioning/src/general/files/gcp/jupyter-conda_Dockerfile
+++ /dev/null
@@ -1,54 +0,0 @@
-# *****************************************************************************
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# ******************************************************************************
-
-
-FROM docker.datalab-base:latest
-
-ARG OS
-
-COPY jupyter-conda/ /root/
-COPY general/scripts/os/* /root/scripts/
-COPY general/scripts/gcp/jupyter-conda_* /root/scripts/
-COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python3.8/datalab/notebook_lib.py
-COPY general/templates/os/${OS}/jupyter-notebook.service /root/templates/
-COPY general/templates/os/${OS}/ungit.service /root/templates/
-COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
-COPY general/templates/os/pyspark_local_template.json /root/templates/
-COPY general/templates/os/py3spark_local_template.json /root/templates/
-COPY general/templates/os/pyspark_dataengine-service_template.json /root/templates/
-COPY general/templates/os/sparkmagic_config_template.json /root/templates/
-COPY general/templates/os/r_dataengine-service_template.json /root/templates/
-COPY general/templates/os/r_template.json /root/templates/
-COPY general/templates/os/run_template.sh /root/templates/
-COPY general/templates/os/toree_dataengine-service_* /root/templates/
-COPY general/templates/os/inactive.sh /root/templates/
-COPY general/templates/os/inactive.service /root/templates/
-COPY general/templates/os/inactive.timer /root/templates/
-COPY general/files/os/toree-assembly-0.5.0.jar /root/files/
-COPY general/files/os/toree_kernel.tar.gz /root/files/
-COPY general/templates/os/pyspark_dataengine_template.json /root/templates/
-COPY general/templates/os/r_dataengine_template.json /root/templates/
-COPY general/templates/os/toree_dataengine_template.json /root/templates/
-COPY general/templates/gcp/core-site.xml /root/templates/
-
-RUN chmod a+x /root/fabfile.py; \
-    chmod a+x /root/scripts/*
-
diff --git a/infrastructure-provisioning/src/general/files/gcp/jupyter-conda_description.json b/infrastructure-provisioning/src/general/files/gcp/jupyter-conda_description.json
deleted file mode 100644
index 7e09b50..0000000
--- a/infrastructure-provisioning/src/general/files/gcp/jupyter-conda_description.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-  "exploratory_environment_shapes" :
-  {
-    "GPU" : [
-      {"Size": "S", "Description": "a2-highgpu-1g", "Type": "a2-highgpu-1g","Ram": "85 GB","Cpu": "12"}
-    ]
-  },
-  "exploratory_environment_versions" :
-  [
-    {
-      "template_name": "Jupyter notebook 6.1.6 with GPU and Anaconda",
-      "description": "Base image with jupyter node creation routines",
-      "environment_type": "exploratory",
-      "version": "jupyter_notebook_conda",
-      "vendor": "GCP"
-    }
-  ]
-}
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/jupyter-conda_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/jupyter-conda_configure.py
deleted file mode 100644
index 571107c..0000000
--- a/infrastructure-provisioning/src/general/scripts/gcp/jupyter-conda_configure.py
+++ /dev/null
@@ -1,322 +0,0 @@
-#!/usr/bin/python3
-
-# *****************************************************************************
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# ******************************************************************************
-
-import datalab.fab
-import datalab.actions_lib
-import datalab.meta_lib
-import json
-from datalab.logger import logging
-import os
-import sys
-import traceback
-import subprocess
-from fabric import *
-
-if __name__ == "__main__":
-    try:
-        GCPMeta = datalab.meta_lib.GCPMeta()
-        GCPActions = datalab.actions_lib.GCPActions()
-        notebook_config = dict()
-        try:
-            notebook_config['exploratory_name'] = (os.environ['exploratory_name']).replace('_', '-').lower()
-        except:
-            notebook_config['exploratory_name'] = ''
-        notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
-        notebook_config['instance_type'] = os.environ['gcp_notebook_instance_size']
-        notebook_config['key_name'] = os.environ['conf_key_name']
-        notebook_config['edge_user_name'] = (os.environ['edge_user_name'])
-        notebook_config['project_name'] = (os.environ['project_name']).replace('_', '-').lower()
-        notebook_config['project_tag'] = notebook_config['project_name']
-        notebook_config['endpoint_name'] = (os.environ['endpoint_name']).replace('_', '-').lower()
-        notebook_config['endpoint_tag'] = notebook_config['endpoint_name']
-        notebook_config['instance_name'] = '{0}-{1}-{2}-nb-{3}'.format(notebook_config['service_base_name'],
-                                                                       notebook_config['project_name'],
-                                                                       notebook_config['endpoint_name'],
-                                                                       notebook_config['exploratory_name'])
-        notebook_config['image_enabled'] = os.environ['conf_image_enabled']
-        notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
-        if notebook_config['shared_image_enabled'] == 'false':
-            notebook_config['expected_primary_image_name'] = '{}-{}-{}-{}-primary-image'.format(
-                notebook_config['service_base_name'], notebook_config['project_name'], notebook_config['endpoint_name'],
-                os.environ['application'])
-            notebook_config['expected_secondary_image_name'] = '{}-{}-{}-{}-secondary-image'.format(
-                notebook_config['service_base_name'], notebook_config['project_name'], notebook_config['endpoint_name'],
-                os.environ['application'])
-            notebook_config['image_labels'] = {"sbn": notebook_config['service_base_name'],
-                                               "endpoint_tag": notebook_config['endpoint_tag'],
-                                               "project_tag": notebook_config['project_tag'],
-                                               os.environ['conf_billing_tag_key']: os.environ['conf_billing_tag_value']}
-        else:
-            notebook_config['expected_primary_image_name'] = '{}-{}-{}-primary-image'.format(
-                notebook_config['service_base_name'], notebook_config['endpoint_name'], os.environ['application'])
-            notebook_config['expected_secondary_image_name'] = '{}-{}-{}-secondary-image'.format(
-                notebook_config['service_base_name'], notebook_config['endpoint_name'], os.environ['application'])
-            notebook_config['image_labels'] = {"sbn": notebook_config['service_base_name'],
-                                               "endpoint_tag": notebook_config['endpoint_tag'],
-                                               os.environ['conf_billing_tag_key']: os.environ['conf_billing_tag_value']}
-        # generating variables regarding EDGE proxy on Notebook instance
-        instance_hostname = GCPMeta.get_private_ip_address(notebook_config['instance_name'])
-        edge_instance_name = '{0}-{1}-{2}-edge'.format(notebook_config['service_base_name'],
-                                                       notebook_config['project_name'],
-                                                       notebook_config['endpoint_name'])
-        edge_instance_hostname = GCPMeta.get_instance_public_ip_by_name(edge_instance_name)
-        edge_instance_private_ip = GCPMeta.get_private_ip_address(edge_instance_name)
-        notebook_config['ssh_key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
-        notebook_config['datalab_ssh_user'] = os.environ['conf_os_user']
-        notebook_config['zone'] = os.environ['gcp_zone']
-        notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
-        if "gcp_wrapped_csek" in os.environ:
-            notebook_config['gcp_wrapped_csek'] = os.environ['gcp_wrapped_csek']
-        else:
-            notebook_config['gcp_wrapped_csek'] = ''
-    except Exception as err:
-        datalab.fab.append_result("Failed to generate variables dictionary", str(err))
-        GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
-        sys.exit(1)
-
-    try:
-        if os.environ['conf_os_family'] == 'debian':
-            notebook_config['initial_user'] = 'ubuntu'
-            notebook_config['sudo_group'] = 'sudo'
-        if os.environ['conf_os_family'] == 'redhat':
-            notebook_config['initial_user'] = 'ec2-user'
-            notebook_config['sudo_group'] = 'wheel'
-
-        logging.info('[CREATING DATALAB SSH USER]')
-        params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
-            instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
-            notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
-
-        try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
-        except:
-            traceback.print_exc()
-            raise Exception
-    except Exception as err:
-        datalab.fab.append_result("Failed creating ssh user 'datalab'.", str(err))
-        GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
-        sys.exit(1)
-
-    # configuring proxy on Notebook instance
-    try:
-        logging.info('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
-        additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
-        params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
-            .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
-                    json.dumps(additional_config), notebook_config['datalab_ssh_user'])
-        try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
-        except:
-            traceback.print_exc()
-            raise Exception
-    except Exception as err:
-        datalab.fab.append_result("Failed to configure proxy.", str(err))
-        GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
-        sys.exit(1)
-
-    # updating repositories & installing python packages
-    try:
-        logging.info('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
-        params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
-            format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
-                   os.environ['gcp_region'], edge_instance_private_ip)
-        try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
-        except:
-            traceback.print_exc()
-            raise Exception
-    except Exception as err:
-        datalab.fab.append_result("Failed installing apps: apt & pip.", str(err))
-        GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
-        sys.exit(1)
-
-    # installing and configuring jupiter and all dependencies
-    try:
-        logging.info('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
-        params = "--hostname {} --keyfile {} " \
-                 "--region {} --spark_version {} " \
-                 "--hadoop_version {} --os_user {} " \
-                 "--scala_version {} " \
-                 "--exploratory_name {} "\
-                 "--edge_ip {}".\
-            format(instance_hostname, notebook_config['ssh_key_path'],
-                   os.environ['gcp_region'], os.environ['notebook_spark_version'],
-                   os.environ['notebook_hadoop_version'], notebook_config['datalab_ssh_user'],
-                   os.environ['notebook_scala_version'],
-                   notebook_config['exploratory_name'], edge_instance_private_ip)
-        try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_jupyter-conda_node', params), shell=True, check=True)
-        except:
-            traceback.print_exc()
-            raise Exception
-    except Exception as err:
-        datalab.fab.append_result("Failed to configure jupyter.", str(err))
-        GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
-        sys.exit(1)
-
-    try:
-        logging.info('[INSTALLING USERs KEY]')
-        additional_config = {"user_keyname": os.environ['project_name'],
-                             "user_keydir": os.environ['conf_key_dir']}
-        params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
-            instance_hostname, notebook_config['ssh_key_path'], json.dumps(additional_config),
-            notebook_config['datalab_ssh_user'])
-        try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
-        except:
-            datalab.fab.append_result("Failed installing users key")
-            raise Exception
-    except Exception as err:
-        datalab.fab.append_result("Failed installing users key.", str(err))
-        GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
-        sys.exit(1)
-
-    try:
-        logging.info('[SETUP USER GIT CREDENTIALS]')
-        params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
-            .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
-        try:
-            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True, check=True)
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
-        except:
-            datalab.fab.append_result("Failed setup git credentials")
-            raise Exception
-    except Exception as err:
-        datalab.fab.append_result("Failed to setup git credentials.", str(err))
-        GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
-        sys.exit(1)
-
-    # installing gpu
-    try:
-        logging.info('[INSTALLING GPU DRIVERS]')
-        params = "--hostname {} --keyfile {} --os_user {}".format(
-            instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'])
-        try:
-            subprocess.run("~/scripts/{}.py {}".format('common_install_gpu', params), shell=True, check=True)
-        except:
-            datalab.fab.append_result("Failed installing gpu drivers")
-            raise Exception
-
-    except Exception as err:
-        datalab.fab.append_result("Failed to install GPU drivers.", str(err))
-        GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
-        sys.exit(1)
-
-
-    if notebook_config['image_enabled'] == 'true':
-        try:
-            logging.info('[CREATING IMAGE]')
-            primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
-            if primary_image_id == '':
-                logging.info("Looks like it's first time we configure notebook server. Creating images.")
-                image_id_list = GCPActions.create_image_from_instance_disks(
-                    notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
-                    notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'],
-                    notebook_config['gcp_wrapped_csek'])
-                if image_id_list and image_id_list[0] != '':
-                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
-                else:
-                    logging.info("Looks like another image creating operation for your template have been started a "
-                          "moment ago.")
-                if image_id_list and image_id_list[1] != '':
-                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
-        except Exception as err:
-            datalab.fab.append_result("Failed creating image.", str(err))
-            GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
-            GCPActions.remove_image(notebook_config['expected_primary_image_name'])
-            GCPActions.remove_image(notebook_config['expected_secondary_image_name'])
-            sys.exit(1)
-
-
-    try:
-        logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
-        additional_info = {
-            'instance_hostname': instance_hostname,
-            'tensor': False
-        }
-        params = "--edge_hostname {} " \
-                 "--keyfile {} " \
-                 "--os_user {} " \
-                 "--type {} " \
-                 "--exploratory_name {} " \
-                 "--additional_info '{}'"\
-            .format(edge_instance_hostname,
-                    notebook_config['ssh_key_path'],
-                    notebook_config['datalab_ssh_user'],
-                    'jupyter-gpu',
-                    notebook_config['exploratory_name'],
-                    json.dumps(additional_info))
-        try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
-        except:
-            datalab.fab.append_result("Failed edge reverse proxy template")
-            raise Exception
-    except Exception as err:
-        datalab.fab.append_result("Failed to set edge reverse proxy template.", str(err))
-        GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
-        sys.exit(1)
-
-    try:
-        # generating output information
-        ip_address = GCPMeta.get_private_ip_address(notebook_config['instance_name'])
-        jupyter_ip_url = "http://" + ip_address + ":8888/{}/".format(notebook_config['exploratory_name'])
-        ungit_ip_url = "http://" + ip_address + ":8085/{}-ungit/".format(notebook_config['exploratory_name'])
-        jupyter_notebook_access_url = "https://" + edge_instance_hostname + "/{}/".format(
-            notebook_config['exploratory_name'])
-        jupyter_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
-            notebook_config['exploratory_name'])
-        logging.info('[SUMMARY]')
-        logging.info("Instance name: {}".format(notebook_config['instance_name']))
-        logging.info("Private IP: {}".format(ip_address))
-        logging.info("Instance type: {}".format(notebook_config['instance_type']))
-        logging.info("Key name: {}".format(notebook_config['key_name']))
-        logging.info("User key name: {}".format(os.environ['project_name']))
-        logging.info("Jupyter URL: {}".format(jupyter_ip_url))
-        logging.info("Ungit URL: {}".format(ungit_ip_url))
-        logging.info("ReverseProxyNotebook".format(jupyter_notebook_access_url))
-        logging.info("ReverseProxyUngit".format(jupyter_ungit_access_url))
-        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
-            notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
-
-        with open("/root/result.json", 'w') as result:
-            res = {"hostname": ip_address,
-                   "ip": ip_address,
-                   "instance_id": notebook_config['instance_name'],
-                   "master_keyname": os.environ['conf_key_name'],
-                   "notebook_name": notebook_config['instance_name'],
-                   "Action": "Create new notebook server",
-                   "exploratory_url": [
-                       {"description": "Jupyter",
-                        "url": jupyter_notebook_access_url},
-                       {"description": "Ungit",
-                        "url": jupyter_ungit_access_url}#,
-                       #{"description": "Jupyter (via tunnel)",
-                       # "url": jupyter_ip_url},
-                       #{"description": "Ungit (via tunnel)",
-                       # "url": ungit_ip_url}
-                   ]}
-            result.write(json.dumps(res))
-    except Exception as err:
-        datalab.fab.append_result("Failed to generate output information", str(err))
-        GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
-        sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
index 93c79eb..f96c911 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
@@ -203,7 +203,6 @@ if __name__ == "__main__":
                              {"name": "edge", "tag": "latest"},
                              {"name": "jupyter", "tag": "latest"},
                              {"name": "jupyter-gpu", "tag": "latest"},
-                             {"name": "jupyter-conda", "tag": "latest"},
                              {"name": "jupyterlab", "tag": "latest"},
                              {"name": "rstudio", "tag": "latest"},
                              {"name": "zeppelin", "tag": "latest"},
diff --git a/infrastructure-provisioning/src/jupyter-conda/fabfile.py b/infrastructure-provisioning/src/jupyter-conda/fabfile.py
deleted file mode 100644
index 30a4e69..0000000
--- a/infrastructure-provisioning/src/jupyter-conda/fabfile.py
+++ /dev/null
@@ -1,255 +0,0 @@
-#!/usr/bin/python3
-
-# *****************************************************************************
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# ******************************************************************************
-
-import logging
-import os
-import sys
-import uuid
-import subprocess
-from datalab.actions_lib import *
-from datalab.fab import *
-from datalab.meta_lib import *
-
-
-# Main function for provisioning notebook server
-@task
-def run(ctx):
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    notebook_config = dict()
-    notebook_config['uuid'] = str(uuid.uuid4())[:5]
-
-    try:
-        params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True, check=True)
-    except Exception as err:
-        traceback.print_exc()
-        append_result("Failed preparing Notebook node.", str(err))
-        sys.exit(1)
-
-    try:
-        params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('jupyter-conda_configure', params), shell=True, check=True)
-    except Exception as err:
-        traceback.print_exc()
-        append_result("Failed configuring Notebook node.", str(err))
-        sys.exit(1)
-
-
-# Main function for terminating exploratory environment
-@task
-def terminate(ctx):
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-    try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True, check=True)
-    except Exception as err:
-        traceback.print_exc()
-        append_result("Failed terminating Notebook node.", str(err))
-        sys.exit(1)
-
-
-# Main function for stopping notebook server
-@task
-def stop(ctx):
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] +  "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-    try:
-        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True, check=True)
-    except Exception as err:
-        traceback.print_exc()
-        append_result("Failed stopping Notebook node.", str(err))
-        sys.exit(1)
-
-
-# Main function for starting notebook server
-@task
-def start(ctx):
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] +  "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    try:
-        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True, check=True)
-    except Exception as err:
-        traceback.print_exc()
-        append_result("Failed starting Notebook node.", str(err))
-        sys.exit(1)
-
-
-# Main function for configuring notebook server after deploying DataEngine service
-@task
-def configure(ctx):
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] +  "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    try:
-        if os.environ['conf_resource'] == 'dataengine-service':
-            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine-service'), shell=True, check=True)
-        elif os.environ['conf_resource'] == 'dataengine':
-            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True, check=True)
-    except Exception as err:
-        traceback.print_exc()
-        append_result("Failed configuring analytical tool on Notebook node.", str(err))
-        sys.exit(1)
-
-
-# Main function for installing additional libraries for notebook
-@task
-def install_libs(ctx):
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    try:
-        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True, check=True)
-    except Exception as err:
-        traceback.print_exc()
-        append_result("Failed installing additional libs for Notebook node.", str(err))
-        sys.exit(1)
-
-
-# Main function for get available libraries for notebook
-@task
-def list_libs(ctx):
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    try:
-        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True, check=True)
-    except Exception as err:
-        traceback.print_exc()
-        append_result("Failed get available libraries for notebook node.", str(err))
-        sys.exit(1)
-
-
-# Main function for manage git credentials on notebook
-@task
-def git_creds(ctx):
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    try:
-        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True, check=True)
-    except Exception as err:
-        traceback.print_exc()
-        append_result("Failed to manage git credentials for notebook node.", str(err))
-        sys.exit(1)
-
-
-# Main function for creating image from notebook
-@task
-def create_image(ctx):
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    try:
-        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True, check=True)
-    except Exception as err:
-        traceback.print_exc()
-        append_result("Failed to create image from notebook node.", str(err))
-        sys.exit(1)
-
-
-# Main function for deleting existing notebook image
-@task
-def terminate_image(ctx):
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True, check=True)
-    except Exception as err:
-        traceback.print_exc()
-        append_result("Failed to create image from notebook node.", str(err))
-        sys.exit(1)
-
-
-# Main function for reconfiguring Spark for notebook
-@task
-def reconfigure_spark(ctx):
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    try:
-        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True, check=True)
-    except Exception as err:
-        traceback.print_exc()
-        append_result("Failed to reconfigure Spark for Notebook node.", str(err))
-        sys.exit(1)
-
-# Main function for checking inactivity status
-@task
-def check_inactivity(ctx):
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    try:
-        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True, check=True)
-    except Exception as err:
-        traceback.print_exc()
-        append_result("Failed to check inactivity status.", str(err))
-        sys.exit(1)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/jupyter-conda/scripts/configure_jupyter-conda_node.py b/infrastructure-provisioning/src/jupyter-conda/scripts/configure_jupyter-conda_node.py
deleted file mode 100644
index 74aa40f..0000000
--- a/infrastructure-provisioning/src/jupyter-conda/scripts/configure_jupyter-conda_node.py
+++ /dev/null
@@ -1,161 +0,0 @@
-#!/usr/bin/python3
-
-# *****************************************************************************
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# ******************************************************************************
-
-import argparse
-import os
-import sys
-from datalab.actions_lib import *
-from datalab.fab import *
-from datalab.notebook_lib import *
-
-parser = argparse.ArgumentParser()
-parser.add_argument('--hostname', type=str, default='')
-parser.add_argument('--keyfile', type=str, default='')
-parser.add_argument('--region', type=str, default='')
-parser.add_argument('--spark_version', type=str, default='')
-parser.add_argument('--hadoop_version', type=str, default='')
-parser.add_argument('--os_user', type=str, default='')
-parser.add_argument('--scala_version', type=str, default='')
-parser.add_argument('--ip_address', type=str, default='')
-parser.add_argument('--exploratory_name', type=str, default='')
-parser.add_argument('--edge_ip', type=str, default='')
-args = parser.parse_args()
-
-spark_version = args.spark_version
-hadoop_version = args.hadoop_version
-jupyter_version = os.environ['notebook_jupyter_version']
-python_venv_version = os.environ['notebook_python_venv_version']
-scala_link = "https://www.scala-lang.org/files/archive/"
-if args.region == 'cn-north-1':
-    spark_link = "http://mirrors.hust.edu.cn/apache/spark/spark-" + spark_version + "/spark-" + spark_version + \
-                 "-bin-hadoop" + hadoop_version + ".tgz"
-else:
-    spark_link = "https://archive.apache.org/dist/spark/spark-" + spark_version + "/spark-" + spark_version + \
-                 "-bin-hadoop" + hadoop_version + ".tgz"
-python_venv_path = '/opt/python/python{0}/bin/python{1}'.format(python_venv_version, python_venv_version[:3])
-pyspark_local_path_dir = '/home/' + args.os_user + '/.local/share/jupyter/kernels/pyspark_local/'
-py3spark_local_path_dir = '/home/' + args.os_user + '/.local/share/jupyter/kernels/py3spark_local/'
-jupyter_conf_file = '/home/' + args.os_user + '/.local/share/jupyter/jupyter_notebook_config.py'
-scala_kernel_path = '/usr/local/share/jupyter/kernels/apache_toree_scala/'
-r_kernels_dir = '/home/' + args.os_user + '/.local/share/jupyter/kernels/'
-jars_dir = '/opt/jars/'
-templates_dir = '/root/templates/'
-files_dir = '/root/files/'
-local_spark_path = '/opt/spark/'
-toree_link = 'https://dist.apache.org/repos/dist/dev/incubator/toree/0.5.0-incubating-rc1/toree-pip/toree-0.5.0.tar.gz'
-r_libs = ['R6', 'pbdZMQ={}'.format(os.environ['notebook_pbdzmq_version']), 'RCurl', 'reshape2', 'caTools={}'.format(os.environ['notebook_catools_version']), 'rJava', 'ggplot2']
-gitlab_certfile = os.environ['conf_gitlab_certfile']
-venv_libs = 'numpy scipy pandas scikit-learn python-git transformers==4.4.2 gensim==4.0.1 tokenizers==0.10.1 python-levenshtein==0.12.2'
-
-
-##############
-# Run script #
-##############
-if __name__ == "__main__":
-    print("Configure connections")
-    global conn
-    conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
-
-    # PREPARE DISK
-    print("Prepare .ensure directory")
-    try:
-        if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
-            conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
-    except:
-        sys.exit(1)
-    print("Mount additional volume")
-    prepare_disk(args.os_user)
-
-    # INSTALL LANGUAGES
-    print("Install Java")
-    ensure_jre_jdk(args.os_user)
-    print("Install Scala")
-    ensure_scala(scala_link, args.scala_version, args.os_user)
-    print("Install Python 3 modules")
-    ensure_python3_libraries(args.os_user)
-
-    # INSTALL PYTHON IN VIRTUALENV
-    print("Configure Python Virtualenv")
-    ensure_python_venv(python_venv_version)
-
-    # INSTALL JUPYTER NOTEBOOK
-    print("Install Jupyter")
-    configure_jupyter(args.os_user, jupyter_conf_file, templates_dir, jupyter_version, args.exploratory_name)
-
-    # INSTALL SPARK AND CLOUD STORAGE JARS FOR SPARK
-    print("Install local Spark")
-    ensure_local_spark(args.os_user, spark_link, spark_version, hadoop_version, local_spark_path)
-    local_spark_scala_version = conn.run(
-        'export PATH=$PATH:' + local_spark_path + 'bin/; spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"').stdout.replace(
-        '\n', '')
-    print("Install storage jars")
-    ensure_local_jars(args.os_user, jars_dir)
-    print("Configure local Spark")
-    configure_local_spark(jars_dir, templates_dir)
-
-    # INSTALL JUPYTER KERNELS
-    #print("Install pyspark local kernel for Jupyter")
-    #ensure_pyspark_local_kernel(args.os_user, pyspark_local_path_dir, templates_dir, spark_version)
-    print("Install py3spark local kernel for Jupyter")
-    ensure_py3spark_local_kernel(args.os_user, py3spark_local_path_dir, templates_dir, spark_version, python_venv_path, python_venv_version)
-    print("Install Toree-Scala kernel for Jupyter")
-    ensure_toree_local_kernel(args.os_user, toree_link, scala_kernel_path, files_dir, local_spark_scala_version, spark_version)
-    print("Install Anaconda and add kernel for Jupyter")
-    ensure_anaconda()
-
-    # INSTALL UNGIT
-    print("Install nodejs")
-    install_nodejs(args.os_user)
-    print("Install ungit")
-    install_ungit(args.os_user, args.exploratory_name, args.edge_ip)
-    if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
-        install_gitlab_cert(args.os_user, gitlab_certfile)
-
-    # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
-    install_inactivity_checker(args.os_user, args.ip_address)
-
-    # INSTALL OPTIONAL PACKAGES
-    print("Installing additional Python packages")
-    ensure_additional_python_libs(args.os_user)
-    print("Install Matplotlib")
-    ensure_matplot(args.os_user)
-    print("Install SBT")
-    ensure_sbt(args.os_user)
-    print("Install Breeze")
-    add_breeze_library_local(args.os_user)
-    if os.environ['conf_cloud_provider'] == 'gcp':
-        print('Installing Pytorch')
-        ensure_pytorch(args.os_user)
-
-    # INSTALL PIP PACKAGES
-    print("Install python venv required libs")
-    ensure_venv_libs(args.os_user, venv_libs)
-
-    #POST INSTALLATION PROCESS
-    print("Updating pyOpenSSL library")
-    update_pyopenssl_lib(args.os_user)
-    print("Removing unexisting kernels")
-    remove_unexisting_kernel(args.os_user)
-
-    conn.close()
diff --git a/infrastructure-provisioning/src/project/templates/locations/jupyter-conda.conf b/infrastructure-provisioning/src/project/templates/locations/jupyter-conda.conf
deleted file mode 100644
index 56f98ed..0000000
--- a/infrastructure-provisioning/src/project/templates/locations/jupyter-conda.conf
+++ /dev/null
@@ -1,29 +0,0 @@
-# *****************************************************************************
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# ******************************************************************************
-location ~* /{{ NAME }}/.* {
-    proxy_pass http://{{ IP }}:8888;
-    proxy_set_header Host $http_host;
-    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-    proxy_set_header X-Real-IP $remote_addr;
-    proxy_http_version 1.1;
-    proxy_set_header Upgrade $http_upgrade;
-    proxy_set_header Connection "upgrade";
-}
diff --git a/infrastructure-provisioning/src/ssn/files/gcp/mongo_roles.json b/infrastructure-provisioning/src/ssn/files/gcp/mongo_roles.json
index b08bc0e..18fdb22 100644
--- a/infrastructure-provisioning/src/ssn/files/gcp/mongo_roles.json
+++ b/infrastructure-provisioning/src/ssn/files/gcp/mongo_roles.json
@@ -110,16 +110,6 @@
     ]
   },
   {
-    "_id": "nbCreateJupyterConda",
-    "description": "Create Notebook Jupyter Gpu with Anaconda",
-    "exploratories": [
-      "docker.datalab-jupyter-conda"
-    ],
-    "groups": [
-      "$anyuser"
-    ]
-  },
-  {
     "_id": "nbCreateJupyterLab",
     "description": "Create Notebook JupyterLab",
     "exploratories": [
diff --git a/infrastructure-provisioning/src/ssn/scripts/docker_build.py b/infrastructure-provisioning/src/ssn/scripts/docker_build.py
index 4a1670e..ac23a2c 100644
--- a/infrastructure-provisioning/src/ssn/scripts/docker_build.py
+++ b/infrastructure-provisioning/src/ssn/scripts/docker_build.py
@@ -36,7 +36,6 @@ if sys.argv[1] == 'all':
         'project',
         'jupyter',
         'jupyter-gpu',
-        'jupyter-conda',
         'jupyterlab',
         'rstudio',
         'zeppelin',
diff --git a/services/provisioning-service/src/main/java/com/epam/datalab/backendapi/core/commands/CommandExecutorMock.java b/services/provisioning-service/src/main/java/com/epam/datalab/backendapi/core/commands/CommandExecutorMock.java
index 5395b30..a058060 100644
--- a/services/provisioning-service/src/main/java/com/epam/datalab/backendapi/core/commands/CommandExecutorMock.java
+++ b/services/provisioning-service/src/main/java/com/epam/datalab/backendapi/core/commands/CommandExecutorMock.java
@@ -79,7 +79,6 @@ public class CommandExecutorMock implements ICommandExecutor {
                     "docker.datalab-deeplearning:latest",
                     "docker.datalab-jupyter:latest",
                     "docker.datalab-jupyter-gpu:latest",
-                    "docker.datalab-jupyter-conda:latest",
                     "docker.datalab-jupyterlab:latest",
                     "docker.datalab-superset:latest",
                     "docker.datalab-rstudio:latest",
diff --git a/services/self-service/src/main/java/com/epam/datalab/backendapi/domain/NotebookTemplate.java b/services/self-service/src/main/java/com/epam/datalab/backendapi/domain/NotebookTemplate.java
index 317eb41..d4e0dda 100644
--- a/services/self-service/src/main/java/com/epam/datalab/backendapi/domain/NotebookTemplate.java
+++ b/services/self-service/src/main/java/com/epam/datalab/backendapi/domain/NotebookTemplate.java
@@ -27,7 +27,6 @@ import lombok.Getter;
 public enum NotebookTemplate {
     JUPYTER("Jupyter notebook 6.1.6"),
     JUPYTER_GPU("Jupyter notebook 6.1.6 with GPU"),
-    JUPYTER_CONDA("Jupyter notebook 6.1.6 with GPU and Anaconda"),
     JUPYTER_LAB("JupyterLab 0.35.6"),
     ZEPPELIN("Apache Zeppelin 0.9.1"),
     DEEP_LEARNING("Deep Learning  2.4"),
diff --git a/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/LibraryServiceImpl.java b/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/LibraryServiceImpl.java
index fbd9da3..8576243 100644
--- a/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/LibraryServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/LibraryServiceImpl.java
@@ -164,7 +164,7 @@ public class LibraryServiceImpl implements LibraryService {
             groups.addAll(Arrays.asList(GROUP_R_PKG, GROUP_JAVA));
         }
         if (isTemplateGroup(templateName, Stream.of(DEEP_LEARNING, TENSOR, TENSOR_GCP,
-                DEEP_LEARNING_GCP, DEEP_LEARNING_AWS, DEEP_LEARNING_AZURE, JUPYTER_GPU, JUPYTER_CONDA))) {
+                DEEP_LEARNING_GCP, DEEP_LEARNING_AWS, DEEP_LEARNING_AZURE, JUPYTER_GPU))) {
 
             groups.add(GROUP_JAVA);
         }
diff --git a/services/self-service/src/main/java/com/epam/datalab/backendapi/util/BillingUtils.java b/services/self-service/src/main/java/com/epam/datalab/backendapi/util/BillingUtils.java
index 5136bdc..df785a6 100644
--- a/services/self-service/src/main/java/com/epam/datalab/backendapi/util/BillingUtils.java
+++ b/services/self-service/src/main/java/com/epam/datalab/backendapi/util/BillingUtils.java
@@ -39,7 +39,7 @@ import static com.epam.datalab.dto.billing.BillingResourceType.*;
 
 @Slf4j
 public class BillingUtils {
-    private static final String[] AVAILABLE_NOTEBOOKS = {"zeppelin", "tensor-rstudio", "rstudio", "tensor", "superset", "jupyterlab", "jupyter", "jupyter-gpu", "jupyter-conda", "deeplearning"};
+    private static final String[] AVAILABLE_NOTEBOOKS = {"zeppelin", "tensor-rstudio", "rstudio", "tensor", "superset", "jupyterlab", "jupyter", "jupyter-gpu", "deeplearning"};
     private static final String[] BILLING_FILTERED_REPORT_HEADERS = {"DataLab ID", "Project", "DataLab Resource Type", "Status", "Shape", "Product", "Cost"};
     private static final String[] COMPLETE_REPORT_REPORT_HEADERS = {"DataLab ID", "User", "Project", "DataLab Resource Type", "Status", "Shape", "Product", "Cost"};
 
diff --git a/services/self-service/src/main/resources/mongo/gcp/mongo_roles.json b/services/self-service/src/main/resources/mongo/gcp/mongo_roles.json
index 0fc6306..a914f34 100644
--- a/services/self-service/src/main/resources/mongo/gcp/mongo_roles.json
+++ b/services/self-service/src/main/resources/mongo/gcp/mongo_roles.json
@@ -132,18 +132,6 @@
     ]
   },
   {
-    "_id": "nbCreateJupyterConda",
-    "description": "Create Notebook Jupyter Gpu with Anaconda",
-    "type": "NOTEBOOK",
-    "cloud": "GCP",
-    "exploratories": [
-      "docker.datalab-jupyter-conda"
-    ],
-    "groups": [
-      "$anyuser"
-    ]
-  },
-  {
     "_id": "nbCreateJupyterLab",
     "description": "Create Notebook JupyterLab",
     "type": "NOTEBOOK",
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/create-environment/create-environment.component.ts b/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/create-environment/create-environment.component.ts
index 0654434..d0b107e 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/create-environment/create-environment.component.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/create-environment/create-environment.component.ts
@@ -176,8 +176,6 @@ export class ExploratoryEnvironmentCreateComponent implements OnInit {
 
     if (this.selectedCloud === 'gcp' &&
         (template?.image === 'docker.datalab-jupyter' ||
-/**        template?.image === 'docker.datalab-jupyter-gpu' || */
-/**        template?.image === 'docker.datalab-jupyter-conda' || */
         template?.image === 'docker.datalab-deeplearning' ||
         template?.image === 'docker.datalab-tensor')) {
 
@@ -193,8 +191,6 @@ export class ExploratoryEnvironmentCreateComponent implements OnInit {
 
     if (template.exploratory_environment_versions[0].template_name.toLowerCase().indexOf('tensorflow') === -1
       && template.exploratory_environment_versions[0].template_name.toLowerCase().indexOf('deeplearning') === -1
-/**      && template.exploratory_environment_versions[0].template_name.toLowerCase().indexOf('jupyter-gpu') === -1
-      && template.exploratory_environment_versions[0].template_name.toLowerCase().indexOf('jupyter-conda') === -1 */
       && template.exploratory_environment_versions[0].template_name.toLowerCase().indexOf('deep learning') === -1
       && template.exploratory_environment_versions[0].template_name.toLowerCase().indexOf('data science') === -1
     ) {
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.component.html b/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.component.html
index 73fc403..d866a7b 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.component.html
@@ -265,7 +265,6 @@
                       *ngIf="element.status === 'running'
                       && element.image !== 'docker.datalab-superset'
                       && element.image !== 'docker.datalab-jupyterlab'
-                      && element.image !== 'docker.datalab-jupyter-conda'
                       && element.image !== 'docker.datalab-jupyter-gpu'"
                       matTooltip="Only one compute can be associated with analytical tool at a time"
                       matTooltipPosition="above"
