Posted to commits@dlab.apache.org by dm...@apache.org on 2019/12/16 15:47:00 UTC

[incubator-dlab] 01/01: [odahu-integration] Created Odahu template

This is an automated email from the ASF dual-hosted git repository.

dmysakovets pushed a commit to branch odahu-integration
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git

commit 0d485234fdd4bacd024c6112377a0049f3a09574
Author: Demyan Mysakovets <de...@gmail.com>
AuthorDate: Mon Dec 16 17:46:35 2019 +0200

    [odahu-integration] Created Odahu template
---
 .../src/general/files/gcp/odahu_Dockerfile         |  83 +++++++++++++++++
 .../src/general/files/gcp/odahu_description.json   |   8 ++
 .../src/general/scripts/gcp/odahu_deploy.py        |  90 ++++++++++++++++++
 .../src/general/scripts/gcp/odahu_prepare.py       |  72 +++++++++++++++
 .../src/general/scripts/gcp/odahu_terminate.py     |  56 ++++++++++++
 infrastructure-provisioning/src/odahu/fabfile.py   |  73 +++++++++++++++
 .../src/odahu/templates/profile.json               | 101 +++++++++++++++++++++
 7 files changed, 483 insertions(+)

diff --git a/infrastructure-provisioning/src/general/files/gcp/odahu_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/odahu_Dockerfile
new file mode 100644
index 0000000..ec6556c
--- /dev/null
+++ b/infrastructure-provisioning/src/general/files/gcp/odahu_Dockerfile
@@ -0,0 +1,83 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+FROM nexus.ailifecycle.org:443/odahu/odahu-flow-automation:1.0.0-dev1575376304448
+ARG OS
+ARG SRC_PATH
+
+# Install any .deb dependencies
+RUN	apt-get update && \
+    apt-get -y upgrade && \
+    apt-get -y install python-pip python-dev groff vim less git wget nano libssl-dev libffi-dev libffi6 && \
+    apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+# Install any python dependencies
+RUN pip install -UI pip==9.0.3 && \
+    pip install boto3 backoff fabric==1.14.0 fabvenv  argparse ujson pycrypto google-api-python-client google-cloud-storage \
+    pyyaml google-auth-httplib2 oauth2client
+
+# Configuring ssh for user
+RUN mkdir -p /root/.ssh; echo "Host *" > /root/.ssh/config; \
+    echo "StrictHostKeyChecking no" >>  /root/.ssh/config; \
+    echo "UserKnownHostsFile=/dev/null" >> /root/.ssh/config; \
+    echo "GlobalKnownHostsFile=/dev/null" >> /root/.ssh/config; \
+    echo "ConnectTimeout=30" >> /root/.ssh/config
+
+# Configuring log directories
+RUN mkdir -p /response; chmod a+rwx /response && \
+    mkdir -p /logs/ssn; chmod a+rwx /logs/ssn && \
+    mkdir -p /logs/project; chmod a+rwx /logs/project && \
+    mkdir -p /logs/edge; chmod a+rwx /logs/edge && \
+    mkdir -p /logs/notebook; chmod a+rwx /logs/notebook && \
+    mkdir -p /logs/dataengine; chmod a+rwx /logs/dataengine && \
+    mkdir -p /logs/dataengine-service; chmod a+rwx /logs/dataengine-service
+
+# Copying all base scripts to docker
+ENV PROVISION_CONFIG_DIR /root/conf/
+ENV KEYFILE_DIR /root/keys/
+
+RUN mkdir -p /root/conf && \
+    mkdir -p /root/scripts && \
+    mkdir -p /root/templates && \
+    mkdir -p /root/files && \
+    mkdir -p /usr/lib/python2.7/dlab && \
+    mkdir -p /root/keys/.ssh
+
+COPY ${SRC_PATH}base/ /root
+COPY ${SRC_PATH}odahu/ /root
+COPY ${SRC_PATH}general/conf/* /root/conf/
+COPY ${SRC_PATH}general/api/*.py /bin/
+COPY ${SRC_PATH}general/scripts/gcp/common_* /root/scripts/
+COPY ${SRC_PATH}general/scripts/gcp/odahu_* /root/scripts/
+COPY ${SRC_PATH}general/lib/gcp/* /usr/lib/python2.7/dlab/
+COPY ${SRC_PATH}general/lib/os/${OS}/common_lib.py /usr/lib/python2.7/dlab/common_lib.py
+COPY ${SRC_PATH}general/lib/os/fab.py /usr/lib/python2.7/dlab/fab.py
+COPY ${SRC_PATH}general/files/os/${OS}/sources.list /root/files/
+COPY ${SRC_PATH}general/files/os/ivysettings.xml /root/templates/
+COPY ${SRC_PATH}general/files/os/local_endpoint.json /root/files/
+COPY ${SRC_PATH}project/templates/locations/ /root/locations/
+
+RUN chmod a+x /root/*.py && \
+    chmod a+x /root/scripts/* && \
+    chmod a+x /bin/*.py
+
+ENTRYPOINT ["/root/entrypoint.py"]
\ No newline at end of file
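
For illustration only (not part of this commit): a minimal sketch of a local build of this image, assuming the repository root as the build context and a Debian OS value; the image tag is hypothetical and dlab's actual build tooling may differ.

    #!/usr/bin/python
    # Hypothetical local build of the Odahu provisioning image; the OS value,
    # SRC_PATH and image tag below are assumptions, not taken from this commit.
    import subprocess

    def build_odahu_image(os_family='debian', src_path='infrastructure-provisioning/src/'):
        subprocess.check_call([
            'docker', 'build',
            '--build-arg', 'OS={}'.format(os_family),
            '--build-arg', 'SRC_PATH={}'.format(src_path),
            '--file', 'infrastructure-provisioning/src/general/files/gcp/odahu_Dockerfile',
            '--tag', 'dlab-odahu:local',
            '.'])

    if __name__ == '__main__':
        build_odahu_image()
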
diff --git a/infrastructure-provisioning/src/general/files/gcp/odahu_description.json b/infrastructure-provisioning/src/general/files/gcp/odahu_description.json
new file mode 100644
index 0000000..160a2ec
--- /dev/null
+++ b/infrastructure-provisioning/src/general/files/gcp/odahu_description.json
@@ -0,0 +1,8 @@
+{
+  "template_name": "OdahuFlow cluster",
+  "description": "OdahuFlow cluster",
+  "environment_type": "computational",
+  "templates":
+  [
+  ]
+}
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/odahu_deploy.py b/infrastructure-provisioning/src/general/scripts/gcp/odahu_deploy.py
new file mode 100644
index 0000000..67acdc4
--- /dev/null
+++ b/infrastructure-provisioning/src/general/scripts/gcp/odahu_deploy.py
@@ -0,0 +1,90 @@
+#!/usr/bin/python
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+import logging
+import json
+import sys
+from dlab.fab import *
+from dlab.meta_lib import *
+from dlab.actions_lib import *
+import os
+
+if __name__ == "__main__":
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/project/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
+    print('Generating infrastructure names and tags')
+    odahu_conf = dict()
+    odahu_conf['project_id'] = (os.environ['gcp_project_id'])
+    odahu_conf['region'] = (os.environ['gcp_region'])
+    odahu_conf['service_base_name'] = (os.environ['conf_service_base_name']).lower().replace('_', '-')
+    odahu_conf['project_name'] = (os.environ['project_name']).lower().replace('_', '-')
+    odahu_conf['odahu_cluster_name'] = (os.environ['odahu_cluster_name']).lower().replace('_', '-')
+    odahu_conf['bucket_name'] = "{}-tfstate".format((os.environ['odahu_cluster_name']).lower().replace('_', '-'))
+    odahu_conf['static_address_name'] = "{}-nat-gw".format((os.environ['odahu_cluster_name']).lower().replace('_', '-'))
+    try:
+        if os.environ['gcp_vpc_name'] == '':
+            raise KeyError
+        else:
+            odahu_conf['vpc_name'] = os.environ['gcp_vpc_name']
+    except KeyError:
+        odahu_conf['vpc_name'] = odahu_conf['service_base_name'] + '-ssn-vpc'
+    odahu_conf['vpc_cidr'] = os.environ['conf_vpc_cidr']
+    odahu_conf['private_subnet_name'] = '{0}-{1}-subnet'.format(odahu_conf['service_base_name'],
+                                                               odahu_conf['project_name'])
+    odahu_conf['keycloak_realm'] = os.environ['keycloak_realm']
+    odahu_conf['keycloak_url'] = os.environ['keycloak_url']
+    odahu_conf['oauth_client_id'] = os.environ['oauth_client_id']
+    odahu_conf['oauth_client_secret'] = os.environ['oauth_client_secret']
+    odahu_conf['oauth_coockie_secret'] = os.environ['oauth_coockie_secret']
+    odahu_conf['tls_crt'] = os.environ['tls_crt']
+    odahu_conf['tls_key'] = os.environ['tls_key']
+
+    try:
+        local("cp /root/templates/profile.json /tmp/")
+        local("sudo sed \'s|<PROJECT_ID>|{}|g\'".format(odahu_conf['project_id']))
+        local("sudo sed \'s|<CLUSTER_NAME>|{}|g\'".format(odahu_conf['odahu_cluster_name']))
+        local("sudo sed \'s|<REGION>|{}|g\'".format(odahu_conf['region']))
+        local("sudo sed \'s|<KEYCLOAK_REALM>|{}|g\'".format(odahu_conf['keycloak_realm']))
+        local("sudo sed \'s|<KEYCLOAK_URL>|{}|g\'".format(odahu_conf['keycloak_url']))
+        local("sudo sed \'s|<VPC_NAME>|{}|g\'".format(odahu_conf['vpc_name']))
+        local("sudo sed \'s|<SUBNET_NAME>|{}|g\'".format(odahu_conf['private_subnet_name']))
+        local("sudo sed \'s|<OAUTH_CLIENT_ID>|{}|g\'".format(odahu_conf['oauth_client_id']))
+        local("sudo sed \'s|<OAUTH_CLIENT_ID>|{}|g\'".format(odahu_conf['oauth_client_id']))
+        local("sudo sed \'s|<TLS_CRT>|{}|g\'".format(odahu_conf['oauth_client_id']))
+        local('cp /tmp/profile.json /')
+    except Exception as err:
+        traceback.print_exc()
+        append_result("Failed to configure parameter file.", str(err))
+        sys.exit(1)
+
+    try:
+        local('tf_runner create')
+    except Exception as err:
+        traceback.print_exc()
+        append_result("Failed to deploy Odahu cluster.", str(err))
+        sys.exit(1)
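
For illustration, the same templating step could be done in-process rather than by shelling out to sed. This sketch is not part of the commit; it assumes the odahu_conf keys and the profile.json placeholders shown above.

    # Alternative sketch: render /root/templates/profile.json in-process. The
    # placeholder-to-value mapping mirrors odahu_conf in odahu_deploy.py; plain
    # string substitution leaves the rest of the template's JSON untouched.
    import json

    def render_profile(conf, template_path='/root/templates/profile.json', out_path='/tmp/profile.json'):
        replacements = {
            '<PROJECT_ID>': conf['project_id'],
            '<CLUSTER_NAME>': conf['odahu_cluster_name'],
            '<REGION>': conf['region'],
            '<KEYCLOAK_REALM>': conf['keycloak_realm'],
            '<KEYCLOAK_URL>': conf['keycloak_url'],
            '<VPC_NAME>': conf['vpc_name'],
            '<SUBNET_NAME>': conf['private_subnet_name'],
            '<OAUTH_CLIENT_ID>': conf['oauth_client_id'],
            '<OAUTH_CLIENT_SECRET>': conf['oauth_client_secret'],
            '<TLS_CRT>': conf['tls_crt'],
            '<TLS_KEY>': conf['tls_key'],
        }
        with open(template_path) as f:
            rendered = f.read()
        for placeholder, value in replacements.items():
            rendered = rendered.replace(placeholder, value)
        # Fail fast if the result is no longer valid JSON (e.g. a value contains a quote).
        json.loads(rendered)
        with open(out_path, 'w') as f:
            f.write(rendered)
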
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/odahu_prepare.py b/infrastructure-provisioning/src/general/scripts/gcp/odahu_prepare.py
new file mode 100644
index 0000000..7428486
--- /dev/null
+++ b/infrastructure-provisioning/src/general/scripts/gcp/odahu_prepare.py
@@ -0,0 +1,72 @@
+#!/usr/bin/python
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+import logging
+import json
+import sys
+from dlab.fab import *
+from dlab.meta_lib import *
+from dlab.actions_lib import *
+import os
+
+if __name__ == "__main__":
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/project/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
+    print('Generating infrastructure names and tags')
+    odahu_conf = dict()
+    odahu_conf['service_base_name'] = (os.environ['conf_service_base_name']).lower().replace('_', '-')
+    odahu_conf['odahu_cluster_name'] = (os.environ['odahu_cluster_name']).lower().replace('_', '-')
+    odahu_conf['bucket_name'] = "{}-tfstate".format((os.environ['odahu_cluster_name']).lower().replace('_', '-'))
+    odahu_conf['static_address_name'] = "{}-nat-gw".format((os.environ['odahu_cluster_name']).lower().replace('_', '-'))
+    odahu_conf['region'] = (os.environ['gcp_region'])
+
+    try:
+        logging.info('[CREATE STATE BUCKETS]')
+        print('[CREATE STATE BUCKETS]')
+        params = "--bucket_name {}".format(odahu_conf['bucket_name'])
+
+        try:
+            local("~/scripts/{}.py {}".format('common_create_bucket', params))
+        except:
+            traceback.print_exc()
+            raise Exception
+    except Exception as err:
+        print('Error: {0}'.format(err))
+        append_result("Unable to create bucket.", str(err))
+        sys.exit(1)
+
+    try:
+        logging.info('[CREATE NAT GATEWAY]')
+        print('[CREATE NAT GATEWAY]')
+        odahu_conf['static_ip'] = \
+            GCPMeta().get_static_address(odahu_conf['region'], odahu_conf['static_address_name'])['address']
+    except Exception as err:
+        print('Error: {0}'.format(err))
+        append_result("Unable to reserve static ip.", str(err))
+        GCPActions().remove_bucket(odahu_conf['bucket_name'])
+        sys.exit(1)
\ No newline at end of file
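
common_create_bucket.py itself is not part of this commit. Purely as an illustration of what the bucket-creation step amounts to, a rough equivalent using the google-cloud-storage client that the Dockerfile above installs could look like this (location and storage class are assumptions):

    # Illustration only: create the Terraform state bucket directly. The real
    # work is delegated to ~/scripts/common_create_bucket.py, which this commit
    # does not include.
    from google.cloud import storage

    def create_tfstate_bucket(bucket_name, region):
        client = storage.Client()
        bucket = storage.Bucket(client, name=bucket_name)
        bucket.location = region
        bucket.storage_class = 'STANDARD'
        client.create_bucket(bucket)
        return bucket
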
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/odahu_terminate.py b/infrastructure-provisioning/src/general/scripts/gcp/odahu_terminate.py
new file mode 100644
index 0000000..9cc4a5f
--- /dev/null
+++ b/infrastructure-provisioning/src/general/scripts/gcp/odahu_terminate.py
@@ -0,0 +1,56 @@
+#!/usr/bin/python
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+import logging
+import json
+import sys
+from dlab.fab import *
+from dlab.meta_lib import *
+from dlab.actions_lib import *
+import os
+
+
+if __name__ == "__main__":
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/project/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
+    print('Removing Odahu cluster')
+    try:
+        local('tf_runner destroy')
+    except Exception as err:
+        traceback.print_exc()
+        append_result("Failed to terminate Odahu cluster.", str(err))
+        sys.exit(1)
+
+    try:
+        buckets = GCPMeta().get_list_buckets((os.environ['conf_service_base_name']).lower().replace('_', '-'))
+        if 'items' in buckets:
+            for i in buckets['items']:
+                GCPActions().remove_bucket(i['name'])
+    except Exception as err:
+        print('Error: {0}'.format(err))
+        sys.exit(1)
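
The GCPMeta/GCPActions helpers come from dlab's GCP library and are not shown in this commit. For illustration only, the same cleanup expressed directly against google-cloud-storage (installed in the Dockerfile above) might look like this:

    # Illustration only: remove buckets whose names start with the service base
    # name. force=True deletes the contained objects first, but only works for
    # buckets holding a small number of objects.
    from google.cloud import storage

    def remove_odahu_buckets(service_base_name):
        client = storage.Client()
        for bucket in client.list_buckets(prefix=service_base_name):
            bucket.delete(force=True)
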
diff --git a/infrastructure-provisioning/src/odahu/fabfile.py b/infrastructure-provisioning/src/odahu/fabfile.py
new file mode 100644
index 0000000..68626bc
--- /dev/null
+++ b/infrastructure-provisioning/src/odahu/fabfile.py
@@ -0,0 +1,73 @@
+#!/usr/bin/python
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+import logging
+import json
+import sys
+from dlab.fab import *
+from dlab.meta_lib import *
+from dlab.actions_lib import *
+import os
+import uuid
+
+
+# Main function for provisioning Odahuflow cluster
+def run():
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'], os.environ['request_id'])
+    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
+    notebook_config = dict()
+    notebook_config['uuid'] = str(uuid.uuid4())[:5]
+
+    try:
+        params = "--uuid {}".format(notebook_config['uuid'])
+        local("~/scripts/{}.py {}".format('odahu_prepare', params))
+    except Exception as err:
+        traceback.print_exc()
+        append_result("Failed preparing Notebook node.", str(err))
+        sys.exit(1)
+
+    try:
+        params = "--uuid {}".format(notebook_config['uuid'])
+        local("~/scripts/{}.py {}".format('odahu_deploy', params))
+    except Exception as err:
+        traceback.print_exc()
+        append_result("Failed to deploy Odahuflow cluster.", str(err))
+        sys.exit(1)
+
+# Main function for Odahuflow cluster termination
+def terminate():
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'], os.environ['request_id'])
+    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+    try:
+        local("~/scripts/{}.py".format('odahu_terminate'))
+    except Exception as err:
+        traceback.print_exc()
+        append_result("Failed to terminate Odahuflow cluster.", str(err))
+        sys.exit(1)
\ No newline at end of file
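
Both tasks follow the same "run a script, report the failure, exit" pattern. A small helper along these lines (not part of the commit, and relying on the module's existing wildcard imports for local, traceback and append_result) could keep that pattern in one place:

    # Hypothetical helper for this fabfile; it assumes the imports already present
    # above (local, append_result and traceback come in via the dlab wildcard imports).
    def run_routine(script_name, error_message, params=''):
        try:
            local("~/scripts/{}.py {}".format(script_name, params))
        except Exception as err:
            traceback.print_exc()
            append_result(error_message, str(err))
            sys.exit(1)

run() would then reduce to run_routine('odahu_prepare', ...) followed by run_routine('odahu_deploy', ...), and terminate() to a single run_routine('odahu_terminate', ...) call.
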
diff --git a/infrastructure-provisioning/src/odahu/templates/profile.json b/infrastructure-provisioning/src/odahu/templates/profile.json
new file mode 100644
index 0000000..cec9643
--- /dev/null
+++ b/infrastructure-provisioning/src/odahu/templates/profile.json
@@ -0,0 +1,101 @@
+{
+    "alert_slack_url": "",
+    "allowed_ips": [
+        "195.56.14.128/29",
+        "91.120.43.0/24",
+        "91.120.48.0/26",
+        "195.56.119.208/28",
+        "195.56.109.192/28",
+        "185.44.13.32/29",
+        "5.188.107.104/29",
+        "46.229.218.0/24",
+        "217.21.63.0/24",
+        "217.21.56.0/24",
+        "213.184.231.0/24",
+        "213.184.243.0/24",
+        "86.57.255.88/29",
+        "194.158.197.0/29",
+        "85.223.209.0/24",
+        "85.223.208.64/29",
+        "85.223.141.72/29",
+        "87.245.220.0/26",
+        "174.128.55.224/29",
+        "174.128.60.0/24",
+        "203.170.48.0/24",
+        "34.73.113.125/32",
+        "34.82.22.149/32"
+    ],
+    "aws_cidr": "",
+    "aws_profile": "",
+    "aws_route_table_id": "",
+    "aws_sg": "",
+    "aws_vpc_id": "",
+    "bastion_tag": "bastion",
+    "cloud_type": "gcp",
+    "cluster_context": "gke_<PROJECT_ID>_<REGION>_<CLUSTER_NAME>",
+    "cluster_name": "<CLUSTER_NAME>",
+    "cluster_type": "gcp/gke",
+    "config_context_auth_info": "gke_<PROJECT_ID>_<REGION>_<CLUSTER_NAME>",
+    "config_context_cluster": "gke_<PROJECT_ID>_<REGION>_<CLUSTER_NAME>",
+    "dns_zone_name": "ailifecycle-org",
+    "docker_password": "<docker_pass>",
+    "docker_repo": "nexus.ailifecycle.org/odahu",
+    "docker_user": "<docker_user>",
+    "enclave_jwt_secret": "<enclave_secret>",
+    "gcp_cidr": "172.31.0.0/24",
+    "git_examples_description": "Git repository with the Legion examples",
+    "git_examples_key": "<git_examples_key>",
+    "git_examples_reference": "origin/master",
+    "git_examples_uri": "git@github.com:odahu/odahu-examples.git",
+    "git_examples_web_ui_link": "https://github.com/odahu/odahu-examples",
+    "github_org_name": "legion-auth-gcp",
+    "gke_node_tag": "<CLUSTER_NAME>-gke-node",
+    "grafana_admin": "grafana_admin",
+    "grafana_pass": "grafana_password",
+    "infra_cidr": "",
+    "initial_node_count": "6",
+    "istio_helm_repo": "https://storage.googleapis.com/istio-release/releases/1.2.2/charts",
+    "k8s_version": "1.13.12",
+    "keycloak_realm": "<KEYCLOAK_REALM>",
+    "keycloak_realm_audience": "legion",
+    "keycloak_url": "<KEYCLOAK_URL>",
+    "data_bucket": "<CLUSTER_NAME>-data-bucket",
+    "helm_repo": "https://nexus.ailifecycle.org/repository/helm-main/",
+    "odahu_infra_version": "1.0.0-rc31",
+    "odahuflow_version": "1.0.0-rc29",
+    "location": "us-west1",
+    "mlflow_toolchain_version": "1.0.0-rc7",
+    "jupyterlab_version": "1.0.0-rc3",
+    "packager_version": "1.0.0-rc3",
+    "model_docker_url": "nexus-local.ailifecycle.org:8084",
+    "vpc_name": "<VPC_NAME>",
+    "network_name": "<VPC_NAME>",
+    "subnet_name": "<SUBNET_NAME>",
+    "node_locations": [
+        "us-west1-a",
+        "us-west1-b"
+    ],
+    "node_version": "1.13.12-gke.2",
+    "oauth_client_id": "<OAUTH_CLIENT_ID>",
+    "oauth_client_secret": "<OAUTH_CLIENT_SECRET>",
+    "oauth_cookie_secret": "<OAUTH_COOCKIE_SECRET>",
+    "oauth_scope": "openid profile email offline_access groups",
+    "pods_cidr": "10.41.0.0/17",
+    "project_id": "<PROJECT_ID>",
+    "region": "<REGIOND>",
+    "root_domain": "ailifecycle.org",
+    "service_cidr": "10.41.128.0/20",
+    "ssh_key": "<SSH_KEY>",
+    "test_oauth_auth_url": "http://52.11.45.11:8080/auth/realms/dlab/protocol/openid-connect/token",
+    "test_oauth_client_id": "odahu_test",
+    "test_oauth_client_secret": "<test_oauth_client_secret>",
+    "test_oauth_scope": "openid profile email offline_access groups",
+    "test_user_email": "test@ailifecycle.org",
+    "test_user_password": "testPasswordPlz!",
+    "tfstate_bucket": "<CLUSTER_NAME>-tfstate",
+    "tls_crt": "<TLS_CRT>",
+    "tls_key": "<TLS_KEY>",
+    "zone": "<ZONE>",
+    "dns_project_id": "or2-msq-epmd-legn-t1iylu",
+    "odahuflow_connection_decrypt_token": "<odahuflow_connection_decrypt_token>"
+}
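
Since deployment fails late if any placeholder survives the substitution step, a small pre-flight check (not part of this commit) can catch an unrendered token before tf_runner runs; it assumes the rendered file lands at /tmp/profile.json as in odahu_deploy.py above.

    # Sanity check: fail if any <PLACEHOLDER>-style token is still present in the
    # rendered profile. Lowercase placeholders such as <docker_pass> are flagged too.
    import json
    import re

    def assert_profile_rendered(path='/tmp/profile.json'):
        with open(path) as f:
            profile = json.load(f)
        leftovers = sorted(set(re.findall(r'<[A-Za-z_]+>', json.dumps(profile))))
        if leftovers:
            raise ValueError('Unrendered placeholders in {}: {}'.format(path, ', '.join(leftovers)))
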


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@dlab.apache.org
For additional commands, e-mail: commits-help@dlab.apache.org