You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@dlab.apache.org by ad...@apache.org on 2019/09/19 15:39:35 UTC
[incubator-dlab] 01/01: Added new changes to endpoint for GCP.
This is an automated email from the ASF dual-hosted git repository.
adamsd pushed a commit to branch DLAB-terraform-endpoint-gcp
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git
commit 31852e8eac3fcacdbce85a0001f975c162af55c7
Author: AdamsDisturber <ad...@gmail.com>
AuthorDate: Thu Sep 19 18:39:14 2019 +0300
Added new changes to endpoint for GCP.
---
.../terraform/gcp/endpoint/daemon.json | 4 +-
.../terraform/gcp/endpoint/main/instance.tf | 14 +-
.../terraform/gcp/endpoint/main/network.tf | 4 +-
.../terraform/gcp/endpoint/main/variables.tf | 24 +-
.../terraform/gcp/endpoint/provisioning.py | 360 ++++++++++++++-------
.../terraform/gcp/endpoint/provisioning.yml | 47 ++-
6 files changed, 284 insertions(+), 169 deletions(-)
diff --git a/infrastructure-provisioning/terraform/gcp/endpoint/daemon.json b/infrastructure-provisioning/terraform/gcp/endpoint/daemon.json
index 94cf208..7dc7f46 100644
--- a/infrastructure-provisioning/terraform/gcp/endpoint/daemon.json
+++ b/infrastructure-provisioning/terraform/gcp/endpoint/daemon.json
@@ -1,5 +1,5 @@
{
+ DNS_IP_RESOLVE
"insecure-registries": ["REPOSITORY"],
- "disable-legacy-registry": true,
- "dns": ["8.8.8.8", "8.8.4.4"]
+ "disable-legacy-registry": true
}
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/gcp/endpoint/main/instance.tf b/infrastructure-provisioning/terraform/gcp/endpoint/main/instance.tf
index e2c8eef..52e0a5d 100644
--- a/infrastructure-provisioning/terraform/gcp/endpoint/main/instance.tf
+++ b/infrastructure-provisioning/terraform/gcp/endpoint/main/instance.tf
@@ -21,6 +21,7 @@
locals {
endpoint_instance_name = "${var.service_base_name}-${var.endpoint_id}-endpoint"
+ endpoint_instance_ip = "${var.service_base_name}-${var.endpoint_id}-static-ip"
}
resource "google_compute_instance" "endpoint" {
@@ -28,9 +29,10 @@ resource "google_compute_instance" "endpoint" {
machine_type = var.endpoint_shape
tags = ["${replace("${local.endpoint_instance_name}", "_", "-")}"]
labels = {
- name = "${local.endpoint_instance_name}"
- sbn = "${var.service_base_name}"
- product = "${var.product}"
+ name = "${local.endpoint_instance_name}"
+ sbn = "${var.service_base_name}"
+ product = "${var.product}"
+ endpoint_id = "${var.endpoint_id}"
}
zone = var.zone
@@ -54,7 +56,11 @@ resource "google_compute_instance" "endpoint" {
network = data.google_compute_network.endpoint_vpc_data.name
subnetwork = data.google_compute_subnetwork.endpoint_subnet_data.name
access_config {
- nat_ip = var.endpoint_eip
+ nat_ip = google_compute_address.static.address
}
}
}
+
+resource "google_compute_address" "static" {
+ name = local.endpoint_instance_ip
+}
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/gcp/endpoint/main/network.tf b/infrastructure-provisioning/terraform/gcp/endpoint/main/network.tf
index 417760e..e989aa5 100644
--- a/infrastructure-provisioning/terraform/gcp/endpoint/main/network.tf
+++ b/infrastructure-provisioning/terraform/gcp/endpoint/main/network.tf
@@ -49,11 +49,10 @@ data "google_compute_subnetwork" "endpoint_subnet_data" {
}
resource "google_compute_firewall" "firewall-ingress" {
- count = var.vpc_name == "" ? 1 : 0
name = local.firewall_ingress_name
network = data.google_compute_network.endpoint_vpc_data.name
allow {
- protocol = "all"
+ protocol = "tcp"
ports = ["22", "8084", "8085"]
}
target_tags = ["${var.service_base_name}-${var.endpoint_id}-endpoint"]
@@ -62,7 +61,6 @@ resource "google_compute_firewall" "firewall-ingress" {
}
resource "google_compute_firewall" "firewall-egress" {
- count = var.vpc_name == "" ? 1 : 0
name = local.firewall_egress_name
network = data.google_compute_network.endpoint_vpc_data.name
direction = "EGRESS"
diff --git a/infrastructure-provisioning/terraform/gcp/endpoint/main/variables.tf b/infrastructure-provisioning/terraform/gcp/endpoint/main/variables.tf
index 41f6964..5697f8a 100644
--- a/infrastructure-provisioning/terraform/gcp/endpoint/main/variables.tf
+++ b/infrastructure-provisioning/terraform/gcp/endpoint/main/variables.tf
@@ -18,25 +18,25 @@
# under the License.
#
# ******************************************************************************
-# id of gcp project
+
variable "project_name" {
- default = ""
+ default = "or2-msq-epmc-dlab-t1iylu"
}
-# path to .json file with creds
+
variable "creds_file" {
- default = ""
+ default = "../../service_account.json"
}
variable "endpoint_shape" {
default = "n1-standard-2"
}
-# for example <us-west1>
+
variable "region" {
- default = ""
+ default = "us-west1"
}
-# for example <us-west1-a>
+
variable "zone" {
- default = ""
+ default = "us-west1-a"
}
variable "service_base_name" {
@@ -66,14 +66,10 @@ variable "endpoint_volume_size" {
variable "subnet_cidr" {
default = "172.31.0.0/24"
}
-# TEMPORARY
+
variable "firewall_ing_cidr_range" {
default = "0.0.0.0/0"
}
-# created by ssn (bcs of certs)
-variable "endpoint_eip" {
- default = ""
-}
variable "firewall_eg_cidr_range" {
default = "0.0.0.0/0"
@@ -134,7 +130,7 @@ variable "endpoint_roles" {
"roles/bigquery.jobUser"
]
}
-# path for public key to connect to instance
+
variable "path_to_pub_key" {
default = ""
}
diff --git a/infrastructure-provisioning/terraform/gcp/endpoint/provisioning.py b/infrastructure-provisioning/terraform/gcp/endpoint/provisioning.py
index 09e61d7..32f3b63 100644
--- a/infrastructure-provisioning/terraform/gcp/endpoint/provisioning.py
+++ b/infrastructure-provisioning/terraform/gcp/endpoint/provisioning.py
@@ -22,8 +22,7 @@ def create_user():
conn.sudo('useradd -m -G {1} -s /bin/bash {0}'
.format(args.os_user, sudo_group))
conn.sudo(
- 'bash -c \'echo "{} ALL = NOPASSWD:ALL" >> /etc/sudoers\''
- .format(args.os_user, initial_user))
+ 'bash -c \'echo "{} ALL = NOPASSWD:ALL" >> /etc/sudoers\''.format(args.os_user, initial_user))
conn.sudo('mkdir /home/{}/.ssh'.format(args.os_user))
conn.sudo('chown -R {0}:{0} /home/{1}/.ssh/'
.format(initial_user, args.os_user))
@@ -44,12 +43,10 @@ def create_user():
def copy_keys():
try:
- conn.put(args.pkey, '/tmp/')
- conn.sudo('mv /tmp/{0} /home/{1}/keys/'
- .format(args.key_name, args.os_user))
+ conn.put(args.pkey, '/home/{0}/keys/'.format(args.os_user))
conn.sudo('chown -R {0}:{0} /home/{0}/keys'.format(args.os_user))
except Exception as err:
- logging.error('Failed to copy keys ', str(err))
+ logging.error('Failed to copy admin key: ', str(err))
traceback.print_exc()
sys.exit(1)
@@ -88,8 +85,7 @@ def ensure_logs_endpoint():
def ensure_jre_jdk_endpoint():
try:
- if not exists(conn, '/home/{}/.ensure_dir/jre_jdk_ensured'
- .format(args.os_user)):
+ if not exists(conn, '/home/{}/.ensure_dir/jre_jdk_ensured'.format(args.os_user)):
conn.sudo('apt-get install -y openjdk-8-jre-headless')
conn.sudo('apt-get install -y openjdk-8-jdk-headless')
conn.sudo('touch /home/{}/.ensure_dir/jre_jdk_ensured'
@@ -102,8 +98,7 @@ def ensure_jre_jdk_endpoint():
def ensure_supervisor_endpoint():
try:
- if not exists(conn, '/home/{}/.ensure_dir/superv_ensured'
- .format(args.os_user)):
+ if not exists(conn, '/home/{}/.ensure_dir/superv_ensured'.format(args.os_user)):
conn.sudo('apt-get -y install supervisor')
conn.sudo('update-rc.d supervisor defaults')
conn.sudo('update-rc.d supervisor enable')
@@ -117,8 +112,7 @@ def ensure_supervisor_endpoint():
def ensure_docker_endpoint():
try:
- if not exists(conn, '/home/{}/.ensure_dir/docker_ensured'
- .format(args.os_user)):
+ if not exists(conn, '/home/{}/.ensure_dir/docker_ensured'.format(args.os_user)):
conn.sudo("bash -c "
"'curl -fsSL https://download.docker.com/linux/ubuntu/gpg"
" | apt-key add -'")
@@ -129,11 +123,6 @@ def ensure_docker_endpoint():
conn.sudo('apt-cache policy docker-ce')
conn.sudo('apt-get install -y docker-ce={}'
.format(args.docker_version))
- # dns_ip_resolve = (conn.run("systemd-resolve --status "
- # "| grep -A 5 'Current Scopes: DNS' "
- # "| grep 'DNS Servers:' "
- # "| awk '{print $3}'")
- # .stdout.rstrip("\n\r"))
if not exists(conn, '{}/tmp'.format(args.dlab_path)):
conn.run('mkdir -p {}/tmp'.format(args.dlab_path))
conn.put('./daemon.json',
@@ -142,8 +131,18 @@ def ensure_docker_endpoint():
.format(args.repository_address,
args.repository_port,
args.dlab_path))
- # conn.sudo('sed -i "s|DNS_IP_RESOLVE|{}|g" {}/tmp/daemon.json'
- # .format(dns_ip_resolve, args.dlab_path))
+ if args.cloud_provider == "aws":
+ dns_ip_resolve = (conn.run("systemd-resolve --status "
+ "| grep -A 5 'Current Scopes: DNS' "
+ "| grep 'DNS Servers:' "
+ "| awk '{print $3}'")
+ .stdout.rstrip("\n\r"))
+ conn.sudo('sed -i "s|DNS_IP_RESOLVE|\"dns\": [{0}],|g" {1}/tmp/daemon.json'
+ .format(dns_ip_resolve, args.dlab_path))
+ elif args.cloud_provider == "gcp":
+ dns_ip_resolve = ""
+ conn.sudo('sed -i "s|DNS_IP_RESOLVE||g" {1}/tmp/daemon.json'
+ .format(dns_ip_resolve, args.dlab_path))
conn.sudo('mv {}/tmp/daemon.json /etc/docker'
.format(args.dlab_path))
conn.sudo('usermod -a -G docker ' + args.os_user)
@@ -170,31 +169,39 @@ def create_key_dir_endpoint():
def configure_keystore_endpoint(os_user):
try:
- # conn.sudo('apt install -y gsutil')
- # conn.sudo('bash -c \'echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] '
- # 'https://packages.cloud.google.com/apt cloud-sdk main" | sudo tee -a '
- # '/etc/apt/sources.list.d/google-cloud-sdk.list \' ')
- # conn.sudo('apt install -y apt-transport-https ca-certificates')
- # conn.sudo('bash -c \'curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key '
- # '--keyring /usr/share/keyrings/cloud.google.gpg add - \' ')
- # conn.sudo('bash -c \'apt-get update && sudo apt-get install google-cloud-sdk \' ')
- if not exists(conn, '/home/' + args.os_user + '/keys/endpoint.keystore.jks'):
- conn.sudo('gsutil -m cp -r gs://{0}/dlab/certs/endpoint/endpoint.keystore.jks '
- '/home/{1}/keys/'
- .format(args.ssn_bucket_name, args.os_user))
- if not exists(conn, '/home/' + args.os_user + '/keys/dlab.crt'):
- conn.sudo('gsutil -m cp -r gs://{0}/dlab/certs/endpoint/endpoint.crt'
- ' /home/{1}/keys/'.format(args.ssn_bucket_name, args.os_user))
- if not exists(conn, '/home/' + args.os_user + '/keys/ssn.crt'):
- conn.sudo('gsutil -m cp -r '
- 'gs://{0}/dlab/certs/ssn/ssn.crt /home/{1}/keys/'
- .format(args.ssn_bucket_name, args.os_user))
+ # TEMPORARY COMMENTED!!!
+ if args.cloud_provider == "aws":
+ conn.sudo('apt-get install -y awscli')
+ if not exists(conn, '/home/' + args.os_user + '/keys/endpoint.keystore.jks'):
+ conn.sudo('aws s3 cp s3://{0}/dlab/certs/endpoint/endpoint.keystore.jks '
+ '/home/{1}/keys/endpoint.keystore.jks'
+ .format(args.ssn_bucket_name, args.os_user))
+ if not exists(conn, '/home/' + args.os_user + '/keys/dlab.crt'):
+ conn.sudo('aws s3 cp s3://{0}/dlab/certs/endpoint/endpoint.crt'
+ ' /home/{1}/keys/endpoint.crt'.format(args.ssn_bucket_name, args.os_user))
+ # if not exists(conn, '/home/' + args.os_user + '/keys/ssn.crt'):
+ # conn.sudo('aws s3 cp '
+ # 's3://{0}/dlab/certs/ssn/ssn.crt /home/{1}/keys/ssn.crt'
+ # .format(args.ssn_bucket_name, args.os_user))
+ elif args.cloud_provider == "gcp":
+ if not exists(conn, '/home/' + args.os_user + '/keys/endpoint.keystore.jks'):
+ conn.sudo('gsutil -m cp -r gs://{0}/dlab/certs/endpoint/endpoint.keystore.jks '
+ '/home/{1}/keys/'
+ .format(args.ssn_bucket_name, args.os_user))
+ if not exists(conn, '/home/' + args.os_user + '/keys/dlab.crt'):
+ conn.sudo('gsutil -m cp -r gs://{0}/dlab/certs/endpoint/endpoint.crt'
+ ' /home/{1}/keys/'.format(args.ssn_bucket_name, args.os_user))
+ # if not exists(conn, '/home/' + args.os_user + '/keys/ssn.crt'):
+ # conn.sudo('gsutil -m cp -r '
+ # 'gs://{0}/dlab/certs/ssn/ssn.crt /home/{1}/keys/'
+ # .format(args.ssn_bucket_name, args.os_user))
if not exists(conn, '/home/' + args.os_user + '/.ensure_dir/cert_imported'):
conn.sudo('keytool -importcert -trustcacerts -alias dlab -file /home/{0}/keys/endpoint.crt -noprompt \
-storepass changeit -keystore {1}/lib/security/cacerts'.format(os_user, java_home))
- conn.sudo('keytool -importcert -trustcacerts -file /home/{0}/keys/ssn.crt -noprompt \
- -storepass changeit -keystore {1}/lib/security/cacerts'.format(os_user, java_home))
+ # conn.sudo('keytool -importcert -trustcacerts -file /home/{0}/keys/ssn.crt -noprompt \
+ # -storepass changeit -keystore {1}/lib/security/cacerts'.format(os_user, java_home))
conn.sudo('touch /home/' + args.os_user + '/.ensure_dir/cert_imported')
+ print("Certificates are imported.")
except Exception as err:
print('Failed to configure Keystore certificates: ', str(err))
traceback.print_exc()
@@ -204,8 +211,7 @@ def configure_keystore_endpoint(os_user):
def configure_supervisor_endpoint():
try:
if not exists(conn,
- '/home/{}/.ensure_dir/configure_supervisor_ensured'
- .format(args.os_user)):
+ '/home/{}/.ensure_dir/configure_supervisor_ensured'.format(args.os_user)):
supervisor_conf = '/etc/supervisor/conf.d/supervisor_svc.conf'
if not exists(conn, '{}/tmp'.format(args.dlab_path)):
conn.run('mkdir -p {}/tmp'.format(args.dlab_path))
@@ -235,14 +241,72 @@ def configure_supervisor_endpoint():
.format(java_home, dlab_conf_dir))
conn.sudo('sed -i "s|CLOUD_PROVIDER|{}|g" {}provisioning.yml'
.format(args.cloud_provider, dlab_conf_dir))
- conn.sudo('sed -i "s|SSN_NLB|{}|g" {}provisioning.yml'
- .format(args.ssn_k8s_nlb_dns_name, dlab_conf_dir))
- conn.sudo('sed -i "s|SSN_ALB|{}|g" {}provisioning.yml'
- .format(args.ssn_k8s_alb_dns_name, dlab_conf_dir))
+
+ conn.sudo('sed -i "s|MONGO_HOST|{}|g" {}provisioning.yml'
+ .format(args.mongo_host, dlab_conf_dir))
+ conn.sudo('sed -i "s|MONGO_PORT|{}|g" {}provisioning.yml'
+ .format(args.mongo_port, dlab_conf_dir))
+ conn.sudo('sed -i "s|SS_HOST|{}|g" {}provisioning.yml'
+ .format(args.ss_host, dlab_conf_dir))
+ conn.sudo('sed -i "s|SS_PORT|{}|g" {}provisioning.yml'
+ .format(args.ss_port, dlab_conf_dir))
+ conn.sudo('sed -i "s|KEYCLOACK_HOST|{}|g" {}provisioning.yml'
+ .format(args.keycloack_host, dlab_conf_dir))
+
conn.sudo('sed -i "s|CLIENT_SECRET|{}|g" {}provisioning.yml'
.format(args.keycloak_client_secret, dlab_conf_dir))
# conn.sudo('sed -i "s|MONGO_PASSWORD|{}|g" {}provisioning.yml'
# .format(args.mongo_password, dlab_conf_dir))
+ conn.sudo('sed -i "s|CONF_OS|{}|g" {}provisioning.yml'
+ .format(args.conf_os, dlab_conf_dir))
+ conn.sudo('sed -i "s|SERVICE_BASE_NAME|{}|g" {}provisioning.yml'
+ .format(args.service_base_name, dlab_conf_dir))
+ conn.sudo('sed -i "s|EDGE_INSTANCE_SIZE|{}|g" {}provisioning.yml'
+ .format(args.edge_instence_size, dlab_conf_dir))
+ conn.sudo('sed -i "s|SUBNET_ID|{}|g" {}provisioning.yml'
+ .format(args.subnet_id, dlab_conf_dir))
+ conn.sudo('sed -i "s|REGION|{}|g" {}provisioning.yml'
+ .format(args.region, dlab_conf_dir))
+ conn.sudo('sed -i "s|ZONE|{}|g" {}provisioning.yml'
+ .format(args.zone, dlab_conf_dir))
+ conn.sudo('sed -i "s|TAG_RESOURCE_ID|{}|g" {}provisioning.yml'
+ .format(args.tag_resource_id, dlab_conf_dir))
+ conn.sudo('sed -i "s|SG_IDS|{}|g" {}provisioning.yml'
+ .format(args.sg_ids, dlab_conf_dir))
+ conn.sudo('sed -i "s|SSN_INSTANCE_SIZE|{}|g" {}provisioning.yml'
+ .format(args.ssn_instance_size, dlab_conf_dir))
+ conn.sudo('sed -i "s|VPC2_ID|{}|g" {}provisioning.yml'
+ .format(args.vpc2_id, dlab_conf_dir))
+ conn.sudo('sed -i "s|SUBNET2_ID|{}|g" {}provisioning.yml'
+ .format(args.subnet2_id, dlab_conf_dir))
+ conn.sudo('sed -i "s|CONF_KEY_DIR|{}|g" {}provisioning.yml'
+ .format(args.conf_key_dir, dlab_conf_dir))
+ conn.sudo('sed -i "s|VPC_ID|{}|g" {}provisioning.yml'
+ .format(args.vpc_id, dlab_conf_dir))
+ conn.sudo('sed -i "s|PEERING_ID|{}|g" {}provisioning.yml'
+ .format(args.peering_id, dlab_conf_dir))
+ conn.sudo('sed -i "s|AZURE_RESOURCE_GROUP_NAME|{}|g" {}provisioning.yml'
+ .format(args.azure_resource_group_name, dlab_conf_dir))
+ conn.sudo('sed -i "s|AZURE_SSN_STORAGE_ACCOUNT_TAG|{}|g" {}provisioning.yml'
+ .format(args.azure_ssn_storage_account_tag, dlab_conf_dir))
+ conn.sudo('sed -i "s|AZURE_SHARED_STORAGE_ACCOUNT_TAG|{}|g" {}provisioning.yml'
+ .format(args.azure_shared_storage_account_tag, dlab_conf_dir))
+ conn.sudo('sed -i "s|AZURE_DATALAKE_TAG|{}|g" {}provisioning.yml'
+ .format(args.azure_datalake_tag, dlab_conf_dir))
+ conn.sudo('sed -i "s|AZURE_CLIENT_ID|{}|g" {}provisioning.yml'
+ .format(args.azure_client_id, dlab_conf_dir))
+ conn.sudo('sed -i "s|GCP_PROJECT_ID|{}|g" {}provisioning.yml'
+ .format(args.gcp_project_id, dlab_conf_dir))
+ conn.sudo('sed -i "s|LDAP_HOST|{}|g" {}provisioning.yml'
+ .format(args.ldap_host, dlab_conf_dir))
+ conn.sudo('sed -i "s|LDAP_DN|{}|g" {}provisioning.yml'
+ .format(args.ldap_dn, dlab_conf_dir))
+ conn.sudo('sed -i "s|LDAP_OU|{}|g" {}provisioning.yml'
+ .format(args.ldap_ou, dlab_conf_dir))
+ conn.sudo('sed -i "s|LDAP_USER_NAME|{}|g" {}provisioning.yml'
+ .format(args.ldap_user_name, dlab_conf_dir))
+ conn.sudo('sed -i "s|LDAP_USER_PASSWORD|{}|g" {}provisioning.yml'
+ .format(args.ldap_user_password, dlab_conf_dir))
conn.sudo('touch /home/{}/.ensure_dir/configure_supervisor_ensured'
.format(args.os_user))
except Exception as err:
@@ -259,12 +323,18 @@ def ensure_jar_endpoint():
web_path = '{}/webapp'.format(args.dlab_path)
if not exists(conn, web_path):
conn.run('mkdir -p {}'.format(web_path))
-
- conn.run('wget -P {} --user={} --password={} '
- 'https://{}/repository/packages/provisioning-service-'
- '2.1.jar --no-check-certificate'
- .format(web_path, args.repository_user,
- args.repository_pass, args.repository_address))
+ if args.cloud_provider == "aws":
+ conn.run('wget -P {} --user={} --password={} '
+ 'https://{}/repository/packages/aws/provisioning-service-'
+ '2.1.jar --no-check-certificate'
+ .format(web_path, args.repository_user,
+ args.repository_pass, args.repository_address))
+ elif args.cloud_provider == "gcp":
+ conn.run('wget -P {} --user={} --password={} '
+ 'https://{}/repository/packages/gcp/provisioning-service-'
+ '2.1.jar --no-check-certificate'
+ .format(web_path, args.repository_user,
+ args.repository_pass, args.repository_address))
conn.run('mv {0}/*.jar {0}/provisioning-service.jar'
.format(web_path))
conn.sudo('touch {}'.format(ensure_file))
@@ -283,6 +353,17 @@ def start_supervisor_endpoint():
sys.exit(1)
+def get_sources():
+ try:
+ conn.run("git clone https://github.com/apache/incubator-dlab.git {0}/sources".format(args.dlab_path))
+ if args.branch_name != "":
+ conn.run("cd {0}/sources && git checkout {1} && cd".format(args.dlab_path, args.branch_name))
+ except Exception as err:
+ logging.error('Failed to download sources: ', str(err))
+ traceback.print_exc()
+ sys.exit(1)
+
+
def pull_docker_images():
try:
ensure_file = ('/home/{}/.ensure_dir/docker_images_pulled'
@@ -293,77 +374,77 @@ def pull_docker_images():
args.repository_pass,
args.repository_address,
args.repository_port))
- conn.sudo('docker pull {}:{}/docker.dlab-base'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker pull {}:{}/docker.dlab-edge'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker pull {}:{}/docker.dlab-project'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker pull {}:{}/docker.dlab-jupyter'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker pull {}:{}/docker.dlab-rstudio'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker pull {}:{}/docker.dlab-zeppelin'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker pull {}:{}/docker.dlab-tensor'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker pull {}:{}/docker.dlab-tensor-rstudio'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker pull {}:{}/docker.dlab-deeplearning'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker pull {}:{}/docker.dlab-dataengine-service'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker pull {}:{}/docker.dlab-dataengine'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker tag {}:{}/docker.dlab-base docker.dlab-base'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker tag {}:{}/docker.dlab-edge docker.dlab-edge'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker tag {}:{}/docker.dlab-project docker.dlab-project'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker tag {}:{}/docker.dlab-jupyter docker.dlab-jupyter'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker tag {}:{}/docker.dlab-rstudio docker.dlab-rstudio'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker tag {}:{}/docker.dlab-zeppelin '
+ conn.sudo('docker pull {}:{}/docker.dlab-base-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker pull {}:{}/docker.dlab-edge-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker pull {}:{}/docker.dlab-project-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker pull {}:{}/docker.dlab-jupyter-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker pull {}:{}/docker.dlab-rstudio-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker pull {}:{}/docker.dlab-zeppelin-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker pull {}:{}/docker.dlab-tensor-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker pull {}:{}/docker.dlab-tensor-rstudio-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker pull {}:{}/docker.dlab-deeplearning-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker pull {}:{}/docker.dlab-dataengine-service-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker pull {}:{}/docker.dlab-dataengine-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker tag {}:{}/docker.dlab-base-{} docker.dlab-base'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker tag {}:{}/docker.dlab-edge-{} docker.dlab-edge'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker tag {}:{}/docker.dlab-project-{} docker.dlab-project'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker tag {}:{}/docker.dlab-jupyter-{} docker.dlab-jupyter'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker tag {}:{}/docker.dlab-rstudio-{} docker.dlab-rstudio'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker tag {}:{}/docker.dlab-zeppelin-{} '
'docker.dlab-zeppelin'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker tag {}:{}/docker.dlab-tensor docker.dlab-tensor'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker tag {}:{}/docker.dlab-tensor-rstudio '
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker tag {}:{}/docker.dlab-tensor-{} docker.dlab-tensor'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker tag {}:{}/docker.dlab-tensor-rstudio-{} '
'docker.dlab-tensor-rstudio'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker tag {}:{}/docker.dlab-deeplearning '
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker tag {}:{}/docker.dlab-deeplearning-{} '
'docker.dlab-deeplearning'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker tag {}:{}/docker.dlab-dataengine-service '
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker tag {}:{}/docker.dlab-dataengine-service-{} '
'docker.dlab-dataengine-service'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker tag {}:{}/docker.dlab-dataengine '
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker tag {}:{}/docker.dlab-dataengine-{} '
'docker.dlab-dataengine'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker rmi {}:{}/docker.dlab-base'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker rmi {}:{}/docker.dlab-edge'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker rmi {}:{}/docker.dlab-project'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker rmi {}:{}/docker.dlab-jupyter'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker rmi {}:{}/docker.dlab-rstudio'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker rmi {}:{}/docker.dlab-zeppelin'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker rmi {}:{}/docker.dlab-tensor'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker rmi {}:{}/docker.dlab-tensor-rstudio'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker rmi {}:{}/docker.dlab-deeplearning'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker rmi {}:{}/docker.dlab-dataengine-service'
- .format(args.repository_address, args.repository_port))
- conn.sudo('docker rmi {}:{}/docker.dlab-dataengine'
- .format(args.repository_address, args.repository_port))
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker rmi {}:{}/docker.dlab-base-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker rmi {}:{}/docker.dlab-edge-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker rmi {}:{}/docker.dlab-project-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker rmi {}:{}/docker.dlab-jupyter-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker rmi {}:{}/docker.dlab-rstudio-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker rmi {}:{}/docker.dlab-zeppelin-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker rmi {}:{}/docker.dlab-tensor-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker rmi {}:{}/docker.dlab-tensor-rstudio-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker rmi {}:{}/docker.dlab-deeplearning-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker rmi {}:{}/docker.dlab-dataengine-service-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
+ conn.sudo('docker rmi {}:{}/docker.dlab-dataengine-{}'
+ .format(args.repository_address, args.repository_port, args.cloud_provider))
conn.sudo('chown -R {0}:docker /home/{0}/.docker/'
.format(args.os_user))
conn.sudo('touch {}'.format(ensure_file))
@@ -376,15 +457,20 @@ def pull_docker_images():
def init_args():
global args
parser = argparse.ArgumentParser()
- parser.add_argument('--dlab_path', type=str, default='')
- parser.add_argument('--key_name', type=str, default='')
+ parser.add_argument('--dlab_path', type=str, default='/opt/dlab')
+ parser.add_argument('--key_name', type=str, default='', help='Name of admin key without .pem extension')
parser.add_argument('--endpoint_eip_address', type=str)
parser.add_argument('--pkey', type=str, default='')
parser.add_argument('--hostname', type=str, default='')
parser.add_argument('--os_user', type=str, default='dlab-user')
parser.add_argument('--cloud_provider', type=str, default='')
- parser.add_argument('--ssn_k8s_nlb_dns_name', type=str, default='')
- parser.add_argument('--ssn_k8s_alb_dns_name', type=str, default='')
+
+ parser.add_argument('--mongo_host', type=str, default='')
+ parser.add_argument('--mongo_port', type=str, default='27017')
+ parser.add_argument('--ss_host', type=str, default='')
+ parser.add_argument('--ss_port', type=str, default='8443')
+ parser.add_argument('--keycloack_host', type=str, default='')
+
# parser.add_argument('--mongo_password', type=str, default='')
parser.add_argument('--repository_address', type=str, default='')
parser.add_argument('--repository_port', type=str, default='')
@@ -395,6 +481,33 @@ def init_args():
parser.add_argument('--ssn_bucket_name', type=str, default='')
parser.add_argument('--endpoint_keystore_password', type=str, default='')
parser.add_argument('--keycloak_client_secret', type=str, default='')
+ parser.add_argument('--branch_name', type=str, default='DLAB-terraform') # change default
+
+ parser.add_argument('--conf_os', type=str, default='debian')
+ parser.add_argument('--service_base_name', type=str, default='')
+ parser.add_argument('--edge_instence_size', type=str, default='')
+ parser.add_argument('--subnet_id', type=str, default='')
+ parser.add_argument('--region', type=str, default='')
+ parser.add_argument('--zone', type=str, default='')
+ parser.add_argument('--tag_resource_id', type=str, default='')
+ parser.add_argument('--sg_ids', type=str, default='')
+ parser.add_argument('--ssn_instance_size', type=str, default='')
+ parser.add_argument('--vpc2_id', type=str, default='')
+ parser.add_argument('--subnet2_id', type=str, default='')
+ parser.add_argument('--conf_key_dir', type=str, default='/root/keys/', help='Should end by symbol /')
+ parser.add_argument('--vpc_id', type=str, default='')
+ parser.add_argument('--peering_id', type=str, default='')
+ parser.add_argument('--azure_resource_group_name', type=str, default='')
+ parser.add_argument('--azure_ssn_storage_account_tag', type=str, default='')
+ parser.add_argument('--azure_shared_storage_account_tag', type=str, default='')
+ parser.add_argument('--azure_datalake_tag', type=str, default='')
+ parser.add_argument('--azure_client_id', type=str, default='')
+ parser.add_argument('--gcp_project_id', type=str, default='')
+ parser.add_argument('--ldap_host', type=str, default='')
+ parser.add_argument('--ldap_dn', type=str, default='')
+ parser.add_argument('--ldap_ou', type=str, default='')
+ parser.add_argument('--ldap_user_name', type=str, default='')
+ parser.add_argument('--ldap_user_password', type=str, default='')
print(parser.parse_known_args())
args = parser.parse_known_args()[0]
@@ -481,6 +594,9 @@ def start_deploy():
logging.info("Ensure jar")
ensure_jar_endpoint()
+ logging.info("Downloading sources")
+ get_sources()
+
logging.info("Pulling docker images")
pull_docker_images()
diff --git a/infrastructure-provisioning/terraform/gcp/endpoint/provisioning.yml b/infrastructure-provisioning/terraform/gcp/endpoint/provisioning.yml
index b8b5a7d..ce7f518 100644
--- a/infrastructure-provisioning/terraform/gcp/endpoint/provisioning.yml
+++ b/infrastructure-provisioning/terraform/gcp/endpoint/provisioning.yml
@@ -36,23 +36,23 @@ devMode: ${DEV_MODE}
mongo:
- host: SSN_HOST
- port: 27017
+ host: MONGO_HOST
+ port: MONGO_PORT
username: admin
password: MONGO_PASSWORD
database: dlabdb
selfService:
protocol: https
- host: SSN_HOST
- port: 8443
+ host: SS_HOST
+ port: SS_PORT
jerseyClient:
timeout: 3s
connectionTimeout: 3s
securityService:
protocol: https
- host: SSN_HOST
+ host: DOESNT_MATTER
port: 8090
jerseyClient:
timeout: 20s
@@ -104,25 +104,25 @@ server:
archivedLogFilenamePattern: ${LOG_ROOT_DIR}/provisioning/request-provisioning-%d{yyyy-MM-dd}.log.gz
archivedFileCount: 10
applicationConnectors:
-# - type: http
- - type: https
+ - type: http
+# - type: https
port: 8084
- certAlias: dlab
- validateCerts: true
- keyStorePath: ${KEY_STORE_PATH}
- keyStorePassword: ${KEY_STORE_PASSWORD}
- trustStorePath: ${TRUST_STORE_PATH}
- trustStorePassword: ${TRUST_STORE_PASSWORD}
+# certAlias: dlab
+# validateCerts: true
+# keyStorePath: ${KEY_STORE_PATH}
+# keyStorePassword: ${KEY_STORE_PASSWORD}
+# trustStorePath: ${TRUST_STORE_PATH}
+# trustStorePassword: ${TRUST_STORE_PASSWORD}
adminConnectors:
-# - type: http
- - type: https
+ - type: http
+# - type: https
port: 8085
- certAlias: dlab
- validateCerts: true
- keyStorePath: ${KEY_STORE_PATH}
- keyStorePassword: ${KEY_STORE_PASSWORD}
- trustStorePath: ${TRUST_STORE_PATH}
- trustStorePassword: ${TRUST_STORE_PASSWORD}
+# certAlias: dlab
+# validateCerts: true
+# keyStorePath: ${KEY_STORE_PATH}
+# keyStorePassword: ${KEY_STORE_PASSWORD}
+# trustStorePath: ${TRUST_STORE_PATH}
+# trustStorePassword: ${TRUST_STORE_PASSWORD}
logging:
level: INFO
@@ -142,7 +142,7 @@ logging:
keycloakConfiguration:
realm: dlab
bearer-only: true
- auth-server-url: http://SSN_ALB/auth
+ auth-server-url: http://KEYCLOACK_HOST/auth
ssl-required: none
register-node-at-startup: true
register-node-period: 600
@@ -176,5 +176,4 @@ cloudProperties:
dn: LDAP_DN
ou: LDAP_OU
user: LDAP_USER_NAME
- password: LDAP_USER_PASSWORD
-
+ password: LDAP_USER_PASSWORD
\ No newline at end of file
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@dlab.apache.org
For additional commands, e-mail: commits-help@dlab.apache.org