Posted to commits@datalab.apache.org by lf...@apache.org on 2022/03/10 16:12:55 UTC

[incubator-datalab] 02/02: [DATALAB]: reverted commit 2cbb95e9e4ca0a0d743231096c18b830290ee2df

This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch develop
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit 3e478ffaf910d34fae0531da3e10e39a58501510
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Thu Mar 10 18:12:24 2022 +0200

    [DATALAB]: reverted commit 2cbb95e9e4ca0a0d743231096c18b830290ee2df
---
 integration-tests-cucumber/pom.xml                 | 133 ++++
 .../java/org/apache/datalab/dto/EndpointDTO.java   |  38 +
 .../org/apache/datalab/mongo/MongoDBHelper.java    |  33 +
 .../org/apache/datalab/util/JacksonMapper.java     |  35 +
 .../org/apache/datalab/util/PropertyHelper.java    |  42 ++
 .../src/test/java/datalab/Constants.java           |  24 +
 .../src/test/java/datalab/RunCucumberTest.java     |  29 +
 .../test/java/datalab/endpoint/EndpointSteps.java  | 116 +++
 .../src/test/java/datalab/login/LoginSteps.java    |  63 ++
 .../src/test/resources/config.properties           |  20 +
 .../src/test/resources/datalab/endpoint.feature    |  75 ++
 .../src/test/resources/datalab/login.feature       |  30 +
 integration-tests/README.MD                        |  80 +++
 .../deeplearning/deeplearning-notebook.json        |   7 +
 .../deeplearning/spark_cluster.json                |   8 +
 .../azure_templates/jupyter/jupyter-notebook.json  |   7 +
 .../azure_templates/jupyter/spark_cluster.json     |   8 +
 .../azure_templates/rstudio/rstudio-notebook.json  |   7 +
 .../azure_templates/rstudio/spark_cluster.json     |   8 +
 .../azure_templates/tensor/spark_cluster.json      |   8 +
 .../azure_templates/tensor/tensor-notebook.json    |   7 +
 .../azure_templates/zeppelin/spark_cluster.json    |   8 +
 .../zeppelin/zeppelin-notebook.json                |   7 +
 integration-tests/examples/config.properties       |  86 +++
 integration-tests/examples/copy_files.py           | 114 +++
 .../examples/ec2_templates/deeplearning/EMR.json   |  10 +
 .../ec2_templates/deeplearning/EMR_spot.json       |  12 +
 .../deeplearning/deeplearning-notebook.json        |   7 +
 .../ec2_templates/deeplearning/spark_cluster.json  |   8 +
 .../examples/ec2_templates/jupyter/EMR.json        |  10 +
 .../examples/ec2_templates/jupyter/EMR_spot.json   |  12 +
 .../ec2_templates/jupyter/jupyter-notebook.json    |   7 +
 .../ec2_templates/jupyter/spark_cluster.json       |   8 +
 .../examples/ec2_templates/rstudio/EMR.json        |  10 +
 .../examples/ec2_templates/rstudio/EMR_spot.json   |  12 +
 .../ec2_templates/rstudio/rstudio-notebook.json    |   7 +
 .../ec2_templates/rstudio/spark_cluster.json       |   8 +
 .../examples/ec2_templates/tensor/EMR.json         |  10 +
 .../examples/ec2_templates/tensor/EMR_spot.json    |  12 +
 .../ec2_templates/tensor/spark_cluster.json        |   8 +
 .../ec2_templates/tensor/tensor-notebook.json      |   7 +
 .../examples/ec2_templates/zeppelin/EMR.json       |  10 +
 .../examples/ec2_templates/zeppelin/EMR_spot.json  |  12 +
 .../ec2_templates/zeppelin/spark_cluster.json      |   8 +
 .../ec2_templates/zeppelin/zeppelin-notebook.json  |   7 +
 .../gcp_templates/deeplearning/dataproc.json       |  12 +
 .../deeplearning/deeplearning-notebook.json        |   7 +
 .../gcp_templates/deeplearning/spark_cluster.json  |   8 +
 .../examples/gcp_templates/jupyter/dataproc.json   |  12 +
 .../gcp_templates/jupyter/jupyter-notebook.json    |   7 +
 .../gcp_templates/jupyter/spark_cluster.json       |   8 +
 .../examples/gcp_templates/rstudio/dataproc.json   |  12 +
 .../gcp_templates/rstudio/rstudio-notebook.json    |   7 +
 .../gcp_templates/rstudio/spark_cluster.json       |   8 +
 .../examples/gcp_templates/tensor/dataproc.json    |  12 +
 .../gcp_templates/tensor/spark_cluster.json        |   8 +
 .../gcp_templates/tensor/tensor-notebook.json      |   7 +
 .../examples/gcp_templates/zeppelin/dataproc.json  |  12 +
 .../gcp_templates/zeppelin/spark_cluster.json      |   8 +
 .../gcp_templates/zeppelin/zeppelin-notebook.json  |   7 +
 .../scenario_deeplearning/deeplearning_tests.py    | 141 ++++
 .../examples/scenario_jupyter/jupyter_tests.py     | 102 +++
 .../examples/scenario_rstudio/rstudio_tests.py     |  95 +++
 .../examples/scenario_tensor/tensor_tests.py       |  91 +++
 .../examples/scenario_zeppelin/zeppelin_tests.py   | 180 +++++
 .../test_libs/deeplearning/lib_groups.json         |   5 +
 .../examples/test_libs/deeplearning/lib_list.json  |  14 +
 .../examples/test_libs/jupyter/lib_groups.json     |   6 +
 .../examples/test_libs/jupyter/lib_list.json       |  18 +
 .../examples/test_libs/lib_groups.json             |   5 +
 integration-tests/examples/test_libs/lib_list.json |   6 +
 .../examples/test_libs/rstudio/lib_groups.json     |   6 +
 .../examples/test_libs/rstudio/lib_list.json       |  18 +
 .../examples/test_libs/tensor/lib_groups.json      |   5 +
 .../examples/test_libs/tensor/lib_list.json        |  14 +
 .../examples/test_libs/zeppelin/lib_groups.json    |   6 +
 .../examples/test_libs/zeppelin/lib_list.json      |  19 +
 .../examples/test_templates/README.txt             |   2 +
 .../test_templates/deeplearning/cat_gray.jpg       | Bin 0 -> 92726 bytes
 .../test_templates/deeplearning/conv.prototxt      |  48 ++
 .../deeplearning/template_caffe.ipynb              |  99 +++
 .../deeplearning/template_caffe2.ipynb             | 619 ++++++++++++++++
 .../deeplearning/template_cntk.ipynb               |  78 ++
 .../deeplearning/template_keras.ipynb              |  64 ++
 .../deeplearning/template_mxnet.ipynb              | 108 +++
 .../deeplearning/template_preparation_tensor.ipynb | 285 ++++++++
 .../deeplearning/template_theano.ipynb             |  45 ++
 .../deeplearning/template_torch.ipynb              |  73 ++
 .../template_visualization_tensor.ipynb            | 139 ++++
 .../jupyter/template_preparation_pyspark.ipynb     | 198 ++++++
 .../jupyter/template_preparation_spark.ipynb       | 103 +++
 .../jupyter/template_preparation_sparkr.ipynb      | 111 +++
 .../jupyter/template_visualization_pyspark.ipynb   | 243 +++++++
 .../jupyter/template_visualization_sparkr.ipynb    | 249 +++++++
 .../test_templates/rstudio/template_preparation.r  |  44 ++
 .../rstudio/template_visualization.r               | 122 ++++
 .../tensor/template_preparation_tensor.ipynb       | 285 ++++++++
 .../tensor/template_visualization_tensor.ipynb     | 139 ++++
 .../zeppelin/template_preparation_pyspark.json     |   1 +
 .../zeppelin/template_preparation_spark.json       |   1 +
 .../zeppelin/template_preparation_sparkr.json      |   1 +
 .../zeppelin/template_visualization_pyspark.json   |   1 +
 .../zeppelin/template_visualization_sparkr.json    |   1 +
 .../cloud/VirtualMachineStatusChecker.java         | 131 ++++
 .../datalab/automation/cloud/aws/AmazonHelper.java | 153 ++++
 .../automation/cloud/aws/AmazonInstanceState.java  |  32 +
 .../automation/cloud/azure/AzureHelper.java        | 154 ++++
 .../datalab/automation/cloud/gcp/GcpHelper.java    | 245 +++++++
 .../automation/cloud/gcp/GcpInstanceState.java     |  33 +
 .../epam/datalab/automation/docker/AckStatus.java  |  53 ++
 .../com/epam/datalab/automation/docker/Bridge.java | 160 +++++
 .../com/epam/datalab/automation/docker/Docker.java | 111 +++
 .../datalab/automation/docker/DockerContainer.java | 171 +++++
 .../epam/datalab/automation/docker/HostConfig.java |  37 +
 .../com/epam/datalab/automation/docker/Labels.java |  23 +
 .../datalab/automation/docker/NetworkSettings.java |  36 +
 .../epam/datalab/automation/docker/Networks.java   |  34 +
 .../epam/datalab/automation/docker/SSHConnect.java | 136 ++++
 .../automation/exceptions/CloudException.java      |  29 +
 .../automation/exceptions/DockerException.java     |  29 +
 .../automation/exceptions/JenkinsException.java    |  29 +
 .../automation/exceptions/LoadFailException.java   |  29 +
 .../datalab/automation/helper/CloudHelper.java     | 219 ++++++
 .../datalab/automation/helper/CloudProvider.java   |  31 +
 .../automation/helper/ConfigPropertyValue.java     | 387 ++++++++++
 .../datalab/automation/helper/NamingHelper.java    | 283 ++++++++
 .../automation/helper/PropertiesResolver.java      | 217 ++++++
 .../datalab/automation/helper/WaitForStatus.java   | 205 ++++++
 .../com/epam/datalab/automation/http/ApiPath.java  |  70 ++
 .../epam/datalab/automation/http/ContentType.java  |  30 +
 .../epam/datalab/automation/http/HttpRequest.java  |  91 +++
 .../datalab/automation/http/HttpStatusCode.java    |  31 +
 .../jenkins/JenkinsConfigProperties.java           |  48 ++
 .../jenkins/JenkinsResponseElements.java           |  29 +
 .../datalab/automation/jenkins/JenkinsService.java | 225 ++++++
 .../datalab/automation/jenkins/JenkinsUrls.java    |  30 +
 .../automation/model/CreateNotebookDto.java        |  86 +++
 .../datalab/automation/model/DeployClusterDto.java |  67 ++
 .../automation/model/DeployDataProcDto.java        | 109 +++
 .../datalab/automation/model/DeployEMRDto.java     | 109 +++
 .../datalab/automation/model/DeploySparkDto.java   |  64 ++
 .../automation/model/ExploratoryImageDto.java      |  72 ++
 .../epam/datalab/automation/model/ImageDto.java    |  92 +++
 .../datalab/automation/model/JsonMapperDto.java    |  61 ++
 .../com/epam/datalab/automation/model/Lib.java     |  66 ++
 .../epam/datalab/automation/model/LoginDto.java    |  59 ++
 .../datalab/automation/model/NotebookConfig.java   | 209 ++++++
 .../src/main/resources/application.properties      |  36 +
 integration-tests/src/main/resources/log4j2.xml    |  63 ++
 .../epam/datalab/automation/test/TestCallable.java | 783 +++++++++++++++++++++
 .../automation/test/TestDataEngineService.java     | 419 +++++++++++
 .../epam/datalab/automation/test/TestServices.java | 327 +++++++++
 .../test/libs/LibraryNotFoundException.java        |  29 +
 .../datalab/automation/test/libs/LibsHelper.java   |  62 ++
 .../automation/test/libs/TestDescription.java      |  31 +
 .../automation/test/libs/TestLibGroupStep.java     | 104 +++
 .../automation/test/libs/TestLibInstallStep.java   | 162 +++++
 .../automation/test/libs/TestLibListStep.java      | 129 ++++
 .../datalab/automation/test/libs/TestLibStep.java  |  50 ++
 .../test/libs/models/LibInstallRequest.java        |  55 ++
 .../test/libs/models/LibSearchRequest.java         |  53 ++
 .../test/libs/models/LibStatusResponse.java        |  87 +++
 .../test/libs/models/LibToSearchData.java          |  37 +
 .../automation/test/libs/models/LibraryStatus.java |  95 +++
 integration-tests/src/test/resources/log4j2.xml    |  63 ++
 .../org.mockito.plugins.MockMaker                  |   1 +
 166 files changed, 12204 insertions(+)

diff --git a/integration-tests-cucumber/pom.xml b/integration-tests-cucumber/pom.xml
new file mode 100644
index 0000000..143bad0
--- /dev/null
+++ b/integration-tests-cucumber/pom.xml
@@ -0,0 +1,133 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <groupId>com.epam.datalab</groupId>
+    <artifactId>integration-tests</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+    <packaging>jar</packaging>
+
+    <properties>
+        <cucumber.version>4.2.6</cucumber.version>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-core</artifactId>
+            <version>2.11.0</version>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+            <version>2.11.0</version>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-annotations</artifactId>
+            <version>2.11.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.projectlombok</groupId>
+            <artifactId>lombok</artifactId>
+            <version>1.18.8</version>
+        </dependency>
+        <dependency>
+            <groupId>org.mongodb</groupId>
+            <artifactId>mongo-java-driver</artifactId>
+            <version>3.10.2</version>
+        </dependency>
+
+
+        <dependency>
+            <groupId>io.cucumber</groupId>
+            <artifactId>cucumber-java</artifactId>
+            <version>${cucumber.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>io.cucumber</groupId>
+            <artifactId>cucumber-junit</artifactId>
+            <version>${cucumber.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>4.12</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.jayway.restassured</groupId>
+            <artifactId>rest-assured</artifactId>
+            <version>2.9.0</version>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <configuration>
+                    <testFailureIgnore>true</testFailureIgnore>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.7.0</version>
+                <configuration>
+                    <encoding>UTF-8</encoding>
+                    <source>1.8</source>
+                    <target>1.8</target>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>net.masterthought</groupId>
+                <artifactId>maven-cucumber-reporting</artifactId>
+                <version>2.8.0</version>
+                <executions>
+                    <execution>
+                        <id>execution</id>
+                        <phase>verify</phase>
+                        <goals>
+                            <goal>generate</goal>
+                        </goals>
+                        <configuration>
+                            <projectName>CucumberWebGui</projectName>
+                            <outputDirectory>${project.build.directory}/cucumber-report-html</outputDirectory>
+                            <cucumberOutput>${project.build.directory}/cucumber.json</cucumberOutput>
+                            <skippedFails>true</skippedFails>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>
diff --git a/integration-tests-cucumber/src/main/java/org/apache/datalab/dto/EndpointDTO.java b/integration-tests-cucumber/src/main/java/org/apache/datalab/dto/EndpointDTO.java
new file mode 100644
index 0000000..cddbbe0
--- /dev/null
+++ b/integration-tests-cucumber/src/main/java/org/apache/datalab/dto/EndpointDTO.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.datalab.dto;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+@Data
+@JsonIgnoreProperties(ignoreUnknown = true)
+@AllArgsConstructor
+@NoArgsConstructor
+public class EndpointDTO {
+	private String name;
+	private String url;
+	private String account;
+	@JsonProperty("endpoint_tag")
+	private String tag;
+}
diff --git a/integration-tests-cucumber/src/main/java/org/apache/datalab/mongo/MongoDBHelper.java b/integration-tests-cucumber/src/main/java/org/apache/datalab/mongo/MongoDBHelper.java
new file mode 100644
index 0000000..3f1dab9
--- /dev/null
+++ b/integration-tests-cucumber/src/main/java/org/apache/datalab/mongo/MongoDBHelper.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.datalab.mongo;
+
+import com.mongodb.client.MongoClient;
+import com.mongodb.client.MongoClients;
+import org.apache.datalab.util.PropertyHelper;
+
+public class MongoDBHelper {
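+    // Connection settings are read from the external properties file supplied at
+    // runtime (see PropertyHelper): "mongo.connection.string" and "mongo.db.name".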
+    private static final MongoClient client = MongoClients
+            .create(PropertyHelper.read("mongo.connection.string"));
+
+    public static void cleanCollection(String collection) {
+        client.getDatabase(PropertyHelper.read("mongo.db.name")).getCollection(collection).drop();
+    }
+}
diff --git a/integration-tests-cucumber/src/main/java/org/apache/datalab/util/JacksonMapper.java b/integration-tests-cucumber/src/main/java/org/apache/datalab/util/JacksonMapper.java
new file mode 100644
index 0000000..7f9f2fd
--- /dev/null
+++ b/integration-tests-cucumber/src/main/java/org/apache/datalab/util/JacksonMapper.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.datalab.util;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public final class JacksonMapper {
+    private static final ObjectMapper MAPPER = new ObjectMapper();
+
+    public static <T> String marshall(T obj) {
+        try {
+            return MAPPER.writeValueAsString(obj);
+        } catch (JsonProcessingException e) {
+            throw new IllegalArgumentException(e);
+        }
+    }
+}
diff --git a/integration-tests-cucumber/src/main/java/org/apache/datalab/util/PropertyHelper.java b/integration-tests-cucumber/src/main/java/org/apache/datalab/util/PropertyHelper.java
new file mode 100644
index 0000000..65a03bc
--- /dev/null
+++ b/integration-tests-cucumber/src/main/java/org/apache/datalab/util/PropertyHelper.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.datalab.util;
+
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.util.Properties;
+
+public class PropertyHelper {
+
+    private static final Properties PROPERTIES;
+
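+    // The properties file location is not hardcoded: it is taken from the
+    // "config.file" system property, e.g. -Dconfig.file=/path/to/config.properties.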
+    static {
+        PROPERTIES = new Properties();
+        try (InputStream inputStream = new FileInputStream(System.getProperty("config.file"))) {
+            PROPERTIES.load(inputStream);
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    public static String read(String prop) {
+        return PROPERTIES.getProperty(prop);
+    }
+}
diff --git a/integration-tests-cucumber/src/test/java/datalab/Constants.java b/integration-tests-cucumber/src/test/java/datalab/Constants.java
new file mode 100644
index 0000000..375d614
--- /dev/null
+++ b/integration-tests-cucumber/src/test/java/datalab/Constants.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package datalab;
+
+public interface Constants {
+    String API_URI = "https://localhost:8443/api/";
+}
diff --git a/integration-tests-cucumber/src/test/java/datalab/RunCucumberTest.java b/integration-tests-cucumber/src/test/java/datalab/RunCucumberTest.java
new file mode 100644
index 0000000..4fbc70c
--- /dev/null
+++ b/integration-tests-cucumber/src/test/java/datalab/RunCucumberTest.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package datalab;
+
+import cucumber.api.CucumberOptions;
+import cucumber.api.junit.Cucumber;
+import org.junit.runner.RunWith;
+
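+// The "json:target/cucumber.json" plugin writes the run report that the
+// maven-cucumber-reporting plugin (see <cucumberOutput> in pom.xml) turns into HTML.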
+@RunWith(Cucumber.class)
+@CucumberOptions(plugin = {"json:target/cucumber.json"})
+public class RunCucumberTest {
+}
diff --git a/integration-tests-cucumber/src/test/java/datalab/endpoint/EndpointSteps.java b/integration-tests-cucumber/src/test/java/datalab/endpoint/EndpointSteps.java
new file mode 100644
index 0000000..3cc9795c
--- /dev/null
+++ b/integration-tests-cucumber/src/test/java/datalab/endpoint/EndpointSteps.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package datalab.endpoint;
+
+import com.jayway.restassured.http.ContentType;
+import com.jayway.restassured.response.Response;
+import com.jayway.restassured.specification.RequestSpecification;
+import cucumber.api.java.en.And;
+import cucumber.api.java.en.Given;
+import cucumber.api.java.en.Then;
+import cucumber.api.java.en.When;
+import org.apache.datalab.dto.EndpointDTO;
+import org.apache.datalab.mongo.MongoDBHelper;
+import org.apache.datalab.util.JacksonMapper;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import static com.jayway.restassured.RestAssured.given;
+import static datalab.Constants.API_URI;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.IsEqual.equalTo;
+
+public class EndpointSteps {
+    private RequestSpecification request;
+    private Response response;
+    private String name;
+
+    @Given("User try to create new endpoint with name {string} and uri {string} and account {string} and {string}")
+    public void userTryToCreateNewEndpoint(String name, String uri, String account, String tag) {
+        this.name = name;
+        request = given().body(JacksonMapper.marshall(new EndpointDTO(name, uri, account, tag)))
+                .auth()
+                .oauth2("token123")
+                .contentType(ContentType.JSON);
+
+    }
+
+    @When("User send create new endpoint request")
+    public void userSendCreateNewEndpoint() {
+        response = request.post(API_URI + "endpoint");
+    }
+
+    @Given("There is no endpoints in DataLab")
+    public void thereIsNoEndpointsInDataLab() {
+        MongoDBHelper.cleanCollection("endpoints");
+
+    }
+
+    @Then("Response status code is {int}")
+    public void responseStatusCodeIs(int code) {
+        assertThat(response.getStatusCode(), equalTo(code));
+    }
+
+    @And("Endpoint URI is present in location header")
+    public void endpointURIIsPresentInLocationHeader() {
+        assertThat(response.getHeader("Location"), equalTo(API_URI + "endpoint/" + name));
+    }
+
+    @When("User try to get information about endpoint with name {string}")
+    public void userTryToGetInformationAboutEndpointWithName(String endpoint) throws URISyntaxException {
+        response = authenticatedRequest()
+                .get(new URI(API_URI + "endpoint/" + endpoint));
+
+    }
+
+    @And("Endpoint information is successfully returned with " +
+            "name {string}, uri {string}, account {string}, and tag {string}")
+    public void endpointInformationIsSuccessfullyReturnedWithNameUriAccountAndTag(String name, String uri,
+                                                                                  String account, String tag) {
+        final EndpointDTO dto = response.getBody().as(EndpointDTO.class);
+        assertThat(dto.getAccount(), equalTo(account));
+        assertThat(dto.getName(), equalTo(name));
+        assertThat(dto.getUrl(), equalTo(uri));
+        assertThat(dto.getTag(), equalTo(tag));
+
+    }
+
+    @When("User try to get information about endpoints")
+    public void userTryToGetInformationAboutEndpoints() throws URISyntaxException {
+        response = authenticatedRequest()
+                .get(new URI(API_URI + "endpoint"));
+
+    }
+
+    @And("There are endpoints with name test1 and test2")
+    public void thereAreEndpointsWithNameTestAndTest() {
+        final EndpointDTO[] endpoints = response.getBody().as(EndpointDTO[].class);
+        assertThat(endpoints.length, equalTo(2));
+        assertThat(endpoints[0].getName(), equalTo("test1"));
+        assertThat(endpoints[1].getName(), equalTo("test2"));
+    }
+
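+    // Note: the bearer token is hardcoded, so these steps assume an environment
+    // where "token123" is accepted (for example, a stubbed authentication setup).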
+    private RequestSpecification authenticatedRequest() {
+        return given()
+                .auth()
+                .oauth2("token123");
+    }
+}
diff --git a/integration-tests-cucumber/src/test/java/datalab/login/LoginSteps.java b/integration-tests-cucumber/src/test/java/datalab/login/LoginSteps.java
new file mode 100644
index 0000000..9351b33
--- /dev/null
+++ b/integration-tests-cucumber/src/test/java/datalab/login/LoginSteps.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package datalab.login;
+
+import com.jayway.restassured.http.ContentType;
+import com.jayway.restassured.response.Response;
+import com.jayway.restassured.specification.RequestSpecification;
+import cucumber.api.java.en.Given;
+import cucumber.api.java.en.Then;
+import cucumber.api.java.en.When;
+import gherkin.deps.com.google.gson.JsonObject;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import static com.jayway.restassured.RestAssured.given;
+import static datalab.Constants.API_URI;
+import static org.hamcrest.core.IsEqual.equalTo;
+import static org.junit.Assert.assertThat;
+
+public class LoginSteps {
+
+
+    private static final String LOGIN_RESOURCE_PATH = API_URI + "user/login";
+    private RequestSpecification request;
+    private Response response;
+
+    @Given("User try to login to Datalab with {string} and {string}")
+    public void userProvidedLoginAndPassword(String username, String password) {
+        JsonObject jsonObject = new JsonObject();
+        jsonObject.addProperty("username", username);
+        jsonObject.addProperty("password", password);
+        request = given().body(jsonObject.toString()).contentType(ContentType.JSON);
+    }
+
+    @When("user try to login")
+    public void userTryToLogin() throws URISyntaxException {
+        response = request.post(new URI(LOGIN_RESOURCE_PATH));
+    }
+
+    @Then("response code is {string}")
+    public void responseCodeIs(String status) {
+        assertThat(response.getStatusCode(), equalTo(Integer.valueOf(status)));
+
+    }
+}
diff --git a/integration-tests-cucumber/src/test/resources/config.properties b/integration-tests-cucumber/src/test/resources/config.properties
new file mode 100644
index 0000000..e1a45b9
--- /dev/null
+++ b/integration-tests-cucumber/src/test/resources/config.properties
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+mongo.connection.string=mongodb://localhost:27017/DATALAB
+mongo.db.name=DATALAB
\ No newline at end of file
diff --git a/integration-tests-cucumber/src/test/resources/datalab/endpoint.feature b/integration-tests-cucumber/src/test/resources/datalab/endpoint.feature
new file mode 100644
index 0000000..0cca194
--- /dev/null
+++ b/integration-tests-cucumber/src/test/resources/datalab/endpoint.feature
@@ -0,0 +1,75 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+Feature: Endpoint management in DataLab
+  This feature allows managing endpoints inside DataLab
+
+  Scenario Outline: Create new endpoint when it does not exist
+
+    Given There is no endpoints in DataLab
+    And User try to create new endpoint with name "<name>" and uri "<uri>" and account "<account>" and "<tag>"
+    When User send create new endpoint request
+    Then Response status code is 200
+    And Endpoint URI is present in location header
+    Examples:
+      | name          | uri     | account   | tag      |
+      | test_endpoint | someuri | 123231312 | some_tag |
+
+
+  Scenario Outline: Create new endpoint when it already exists
+
+    Given There is no endpoints in DataLab
+    And User try to create new endpoint with name "<name>" and uri "<uri>" and account "<account>" and "<tag>"
+    And  User send create new endpoint request
+    When User try to create new endpoint with name "<name>" and uri "<uri>" and account "<account>" and "<tag>"
+    And User send create new endpoint request
+    Then Response status code is 409
+    Examples:
+      | name          | uri     | account   | tag      |
+      | test_endpoint | someuri | 123231312 | some_tag |
+
+
+  Scenario Outline: Get information for endpoint
+
+    Given There is no endpoints in DataLab
+    And User try to create new endpoint with name "<name>" and uri "<uri>" and account "<account>" and "<tag>"
+    And  User send create new endpoint request
+    When User try to get information about endpoint with name "<name>"
+    Then Response status code is 200
+    And Endpoint information is successfully returned with name "<name>", uri "<uri>", account "<account>", and tag "<tag>"
+    Examples:
+      | name          | uri     | account   | tag      |
+      | test_endpoint | someuri | 123231312 | some_tag |
+
+
+  Scenario: Get list of endpoints
+
+    Given There is no endpoints in DataLab
+    And User try to create new endpoint with name "test1" and uri "someuri1" and account "123" and "customTag1"
+    And  User send create new endpoint request
+    And User try to create new endpoint with name "test2" and uri "someuri2" and account "1233" and "customTag4"
+    And  User send create new endpoint request
+    When User try to get information about endpoints
+    Then Response status code is 200
+    And There are endpoints with name test1 and test2
+
+  Scenario: Get endpoint that does not exist
+
+    Given There is no endpoints in DataLab
+    When User try to get information about endpoint with name "someName"
+    Then Response status code is 404
diff --git a/integration-tests-cucumber/src/test/resources/datalab/login.feature b/integration-tests-cucumber/src/test/resources/datalab/login.feature
new file mode 100644
index 0000000..243a6f3
--- /dev/null
+++ b/integration-tests-cucumber/src/test/resources/datalab/login.feature
@@ -0,0 +1,30 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+Feature: DataLab login API
+  Used to check the DataLab login flow
+
+  Scenario Outline: User tries to log in to DataLab
+    Given User try to login to Datalab with "<username>" and "<password>"
+    When user try to login
+    Then response code is "<status>"
+
+    Examples:
+      | username       | password | status |
+      | test           | pass     | 200    |
+      | not_valid_user | pass     | 401    |
\ No newline at end of file
diff --git a/integration-tests/README.MD b/integration-tests/README.MD
new file mode 100644
index 0000000..e4abd9e
--- /dev/null
+++ b/integration-tests/README.MD
@@ -0,0 +1,80 @@
+# The automation tests
+
+## What is it?
+This is the module with automation tests for DataLab integration testing.
+
+## How to run?
+
+Automation tests can be run from UNIX and Windows machines.
+The prerequisites are:
+* The environment is established
+* The machine where tests are going to be run has access to Jenkins
+* A set of configurations is performed
+* A set of Java parameters is passed.
+
+## What are the Java parameters?
+
+The Java parameters are key-value pairs prefixed with "-D" that are passed to the JVM.
+The required Java parameters are: "jenkins.buildNumber", "cluster.username" and "cluster.os.family".
+
+Example:
+* -Djenkins.buildNumber=57 -Dcluster.username=datalab-user -Dcluster.os.family=debian
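+
+Assuming the tests are launched through Maven (a sketch, not a command verified for this module), the parameters are simply appended to the test invocation:
+* mvn test -Djenkins.buildNumber=57 -Dcluster.username=datalab-user -Dcluster.os.family=debian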
+
+## What are other required configurations?
+
+### The config.properties file
+ 
+ "config.properties" file example could be found in the module root "example" directory.
+ 
+### Jupyter scenario files
+Example scenario files can be found in the "examples/scenario_jupyter" directory.
+ 
+### Amazon instance configuration JSON files
+Example Amazon instance configuration JSON files can be found in the "examples/ec2_templates" directory.
+
+## Automation run modes
+There are 3 modes: server, dev, and dev local. Based on the mode, the configuration files should be placed in different locations.
+ 
+### Server mode
+Server mode does not require any specific parameters. The configuration file locations for server mode are the following:
+* conf.file.location=${conf.root.path}/config.properties
+* keys.directory.location=${conf.root.path}/keys
+* python.files.location=${conf.root.path}/scenario_jupyter/
+* cluster.config.file.location=${conf.root.path}/ec2_templates/
+
+### Dev mode
+Dev mode has 2 options: run against services deployed in Amazon, or against locally mocked services.
+
+#### Dev mode against services deployed locally
+To run tests against a locally deployed service, pass the following parameter:
+
+ * -Dconf.root.path=examples
+
+Also, set the following parameters in the config.properties file or pass them to the JVM:
+ * -Drun.mode.local=true
+ * -Duse.jenkins=false
+ * -Dssn.url=https://localhost:8443
+ * -Dservice.base.name=dev
+ * -Dnotebooks.to.test=rstudio,jupyter,zeppelin
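+
+Put together (again assuming a Maven launch), a local run could look like:
+* mvn test -Dconf.root.path=examples -Drun.mode.local=true -Duse.jenkins=false -Dssn.url=https://localhost:8443 -Dservice.base.name=dev -Dnotebooks.to.test=rstudio,jupyter,zeppelin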
+
+#### Dev mode against services deployed in Amazon
+To run tests against services deployed in Amazon, pass the following parameters:
+ 
+ * -Drun.mode=dev
+ * -Dconf.root.path=examples
+ * -Djenkins.buildNumber=163
+ * -Dnotebooks.to.test=rstudio
+ * -Dexecution.threads=1
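+
+Combined into a single command (same Maven assumption as above), this could look like:
+* mvn test -Drun.mode=dev -Dconf.root.path=examples -Djenkins.buildNumber=163 -Dnotebooks.to.test=rstudio -Dexecution.threads=1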
+
+
+The "execution.threads" property with value 1  limits threads for notebooks testing to 1. So all tests would be executed synchronously.
+
+To run automation tests in dev mode, one more Java parameter is required:
+* "run.mode" set to "dev".
+
+Example:
+* -Drun.mode=dev
+
+In this case the application configuration is the following:
diff --git a/integration-tests/examples/azure_templates/deeplearning/deeplearning-notebook.json b/integration-tests/examples/azure_templates/deeplearning/deeplearning-notebook.json
new file mode 100644
index 0000000..69dea4e
--- /dev/null
+++ b/integration-tests/examples/azure_templates/deeplearning/deeplearning-notebook.json
@@ -0,0 +1,7 @@
+{
+  "image": "docker.datalab-deeplearning",
+  "name": "set the name",
+  "shape": "Standard_NC6",
+  "version": "deeplearning-2.2",
+  "template_name": "Deep Learning 2.2"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/deeplearning/spark_cluster.json b/integration-tests/examples/azure_templates/deeplearning/spark_cluster.json
new file mode 100644
index 0000000..3e17708
--- /dev/null
+++ b/integration-tests/examples/azure_templates/deeplearning/spark_cluster.json
@@ -0,0 +1,8 @@
+{
+  "image": "docker.datalab-dataengine",
+  "name": "set the name",
+  "dataengine_instance_count": "2",
+  "dataengine_instance_shape": "Standard_NC6",
+  "notebook_name": "set notebook name",
+  "template_name": "Apache Spark cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/jupyter/jupyter-notebook.json b/integration-tests/examples/azure_templates/jupyter/jupyter-notebook.json
new file mode 100644
index 0000000..fa661da
--- /dev/null
+++ b/integration-tests/examples/azure_templates/jupyter/jupyter-notebook.json
@@ -0,0 +1,7 @@
+{
+  "image": "docker.datalab-jupyter",
+  "name": "set the name",
+  "shape": "Standard_F2s",
+  "version": "jupyter_notebook-5.7.4",
+  "template_name": "Jupyter notebook 5.7.4"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/jupyter/spark_cluster.json b/integration-tests/examples/azure_templates/jupyter/spark_cluster.json
new file mode 100644
index 0000000..9c5f9ee
--- /dev/null
+++ b/integration-tests/examples/azure_templates/jupyter/spark_cluster.json
@@ -0,0 +1,8 @@
+{
+  "image": "docker.datalab-dataengine",
+  "name": "set the name",
+  "dataengine_instance_count": "2",
+  "dataengine_instance_shape": "Standard_F4s",
+  "notebook_name": "set notebook name",
+  "template_name": "Apache Spark cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/rstudio/rstudio-notebook.json b/integration-tests/examples/azure_templates/rstudio/rstudio-notebook.json
new file mode 100644
index 0000000..3458ed1
--- /dev/null
+++ b/integration-tests/examples/azure_templates/rstudio/rstudio-notebook.json
@@ -0,0 +1,7 @@
+{
+  "image": "docker.datalab-rstudio",
+  "name": "set the name",
+  "shape": "Standard_F2s",
+  "version": "RStudio-1.1.463",
+  "template_name": "RStudio 1.1.463"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/rstudio/spark_cluster.json b/integration-tests/examples/azure_templates/rstudio/spark_cluster.json
new file mode 100644
index 0000000..9c5f9ee
--- /dev/null
+++ b/integration-tests/examples/azure_templates/rstudio/spark_cluster.json
@@ -0,0 +1,8 @@
+{
+  "image": "docker.datalab-dataengine",
+  "name": "set the name",
+  "dataengine_instance_count": "2",
+  "dataengine_instance_shape": "Standard_F4s",
+  "notebook_name": "set notebook name",
+  "template_name": "Apache Spark cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/tensor/spark_cluster.json b/integration-tests/examples/azure_templates/tensor/spark_cluster.json
new file mode 100644
index 0000000..3e17708
--- /dev/null
+++ b/integration-tests/examples/azure_templates/tensor/spark_cluster.json
@@ -0,0 +1,8 @@
+{
+  "image": "docker.datalab-dataengine",
+  "name": "set the name",
+  "dataengine_instance_count": "2",
+  "dataengine_instance_shape": "Standard_NC6",
+  "notebook_name": "set notebook name",
+  "template_name": "Apache Spark cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/tensor/tensor-notebook.json b/integration-tests/examples/azure_templates/tensor/tensor-notebook.json
new file mode 100644
index 0000000..40241e4
--- /dev/null
+++ b/integration-tests/examples/azure_templates/tensor/tensor-notebook.json
@@ -0,0 +1,7 @@
+{
+  "image": "docker.datalab-tensor",
+  "name": "set the name",
+  "shape": "Standard_NC6",
+  "version": "tensorflow_gpu-1.3.0",
+  "template_name": "TensorFlow 1.3.0"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/zeppelin/spark_cluster.json b/integration-tests/examples/azure_templates/zeppelin/spark_cluster.json
new file mode 100644
index 0000000..9c5f9ee
--- /dev/null
+++ b/integration-tests/examples/azure_templates/zeppelin/spark_cluster.json
@@ -0,0 +1,8 @@
+{
+  "image": "docker.datalab-dataengine",
+  "name": "set the name",
+  "dataengine_instance_count": "2",
+  "dataengine_instance_shape": "Standard_F4s",
+  "notebook_name": "set notebook name",
+  "template_name": "Apache Spark cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/zeppelin/zeppelin-notebook.json b/integration-tests/examples/azure_templates/zeppelin/zeppelin-notebook.json
new file mode 100644
index 0000000..7e5f4e4
--- /dev/null
+++ b/integration-tests/examples/azure_templates/zeppelin/zeppelin-notebook.json
@@ -0,0 +1,7 @@
+{
+  "image": "docker.datalab-zeppelin",
+  "name": "set the name",
+  "shape": "Standard_F2s",
+  "version": "zeppelin-0.9.0",
+  "template_name": "Apache Zeppelin 0.9.0"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/config.properties b/integration-tests/examples/config.properties
new file mode 100644
index 0000000..3fb0921
--- /dev/null
+++ b/integration-tests/examples/config.properties
@@ -0,0 +1,86 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+JENKINS_USERNAME=
+JENKINS_PASSWORD=
+USERNAME=
+PASSWORD=
+NOT_IAM_USERNAME=
+NOT_IAM_PASSWORD=
+NOT_DATALAB_USERNAME=
+NOT_DATALAB_PASSWORD=
+JENKINS_JOB_URL=
+USER_FOR_ACTIVATE_KEY=
+PASSWORD_FOR_ACTIVATE_KEY=
+ACCESS_KEY_PRIV_FILE_NAME=
+ACCESS_KEY_PUB_FILE_NAME=
+AWS_ACCESS_KEY_ID=
+AWS_SECRET_ACCESS_KEY=
+AWS_REGION=
+AWS_REQUEST_TIMEOUT=10s
+
+TIMEOUT_JENKINS_AUTOTEST=20m
+TIMEOUT_UPLOAD_KEY=40m
+TIMEOUT_SSN_STARTUP=60m
+CLUSTER_OS_USERNAME=datalab-user
+CLUSTER_OS_FAMILY=debian
+
+#NOTEBOOKS_TO_TEST=\
+#		[\
+#			{\
+#				"notebook_template": "jupyter",\
+#				"data_engine_type": "dataengine",\
+#				"full_test": false,\
+#				"timeout_notebook_create": "60m",\
+#				"timeout_notebook_startup": "20m",\
+#				"timeout_notebook_shutdown": "10m",\
+#				"timeout_cluster_create": "60m",\
+#				"timeout_cluster_terminate": "20m",\
+#				"timeout_lib_groups": "5m",\
+#				"timeout_lib_list": "5m",\
+#				"timeout_lib_install": "15m"\
+#			},\
+#			{\
+#				"notebook_template": "rstudio",\
+#				"data_engine_type": "dataengine-service"\
+#			},\
+#                       {\
+#                               "notebook_template": "zeppelin",\
+#                               "data_engine_type": "dataengine-service"\
+#                       }\
+#		]
+
+NOTEBOOKS_TO_TEST=[\
+                        {\
+                                "notebook_template":"jupyter",\
+                                "data_engine_type":"dataengine",\
+                                "full_test":true\
+                        },\
+                        {\
+                                "notebook_template":"jupyter",\
+                                "data_engine_type":"dataengine-service"\
+                        }\
+                ]
+JUPYTER_SCENARIO_FILES=
+S3_TESTS_TEMPLATE_BUCKET_NAME=
+
+#RUN_MODE_LOCAL=true
+#USE_JENKINS=false
+#SSN_URL=https://localhost:8443
+#SERVICE_BASE_NAME=
diff --git a/integration-tests/examples/copy_files.py b/integration-tests/examples/copy_files.py
new file mode 100644
index 0000000..6f9b99f
--- /dev/null
+++ b/integration-tests/examples/copy_files.py
@@ -0,0 +1,114 @@
+#!/usr/bin/python3
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+import argparse
+import json
+import os
+import subprocess
+import sys
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--storage', type=str, default='S3/GCP buckets, Azure Blob container / Datalake folder')
+parser.add_argument('--notebook', type=str, default='aws, azure, gcp')
+parser.add_argument('--cloud', type=str, default='aws, azure, gcp')
+parser.add_argument('--azure_storage_account', type=str, default='')
+parser.add_argument('--azure_datalake_account', type=str, default='')
+args = parser.parse_args()
+
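+# Example invocation (hypothetical placeholder values for the flags parsed above):
+#   python3 copy_files.py --cloud aws --storage my-bucket --notebook jupyter
+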
+dataset_file = ['airports.csv', 'carriers.csv', '2008.csv.bz2']
+
+def download_dataset():
+    try:
+        for f in dataset_file:
+            subprocess.run('wget http://stat-computing.org/dataexpo/2009/{0} -O /tmp/{0}'.format(f), shell=True, check=True)
+    except Exception as err:
+        print('Failed to download test dataset', str(err))
+        sys.exit(1)
+
+def upload_aws():
+    try:
+        for f in dataset_file:
+            subprocess.run('aws s3 cp /tmp/{0} s3://{1}/{2}_dataset/ --sse AES256'.format(f, args.storage, args.notebook), shell=True, check=True)
+    except Exception as err:
+        print('Failed to upload test dataset to bucket', str(err))
+        sys.exit(1)
+
+def upload_azure_datalake():
+    try:
+        from azure.datalake.store import core, lib, multithread
+        sp_creds = json.loads(open(os.environ['AZURE_AUTH_LOCATION']).read())
+        dl_filesystem_creds = lib.auth(tenant_id=json.dumps(sp_creds['tenantId']).replace('"', ''),
+                                       client_secret=json.dumps(sp_creds['clientSecret']).replace('"', ''),
+                                       client_id=json.dumps(sp_creds['clientId']).replace('"', ''),
+                                       resource='https://datalake.azure.net/')
+        datalake_client = core.AzureDLFileSystem(dl_filesystem_creds, store_name=args.azure_datalake_account)
+        for f in dataset_file:
+            multithread.ADLUploader(datalake_client,
+                                    lpath='/tmp/{0}'.format(f),
+                                    rpath='{0}/{1}_dataset/{2}'.format(args.storage, args.notebook, f))
+    except Exception as err:
+        print('Failed to upload test dataset to datalake store', str(err))
+        sys.exit(1)
+
+def upload_azure_blob():
+    try:
+        from azure.mgmt.storage import StorageManagementClient
+        from azure.storage.blob import BlockBlobService
+        from azure.common.client_factory import get_client_from_auth_file
+        storage_client = get_client_from_auth_file(StorageManagementClient)
+        resource_group_name = ''
+        for i in storage_client.storage_accounts.list():
+            if args.storage.replace('container', 'storage') == str(i.tags.get('Name')):
+                resource_group_name = str(i.tags.get('SBN'))
+        secret_key = storage_client.storage_accounts.list_keys(resource_group_name, args.azure_storage_account).keys[0].value
+        block_blob_service = BlockBlobService(account_name=args.azure_storage_account, account_key=secret_key)
+        for f in dataset_file:
+            block_blob_service.create_blob_from_path(args.storage, '{0}_dataset/{1}'.format(args.notebook, f), '/tmp/{0}'.format(f))
+    except Exception as err:
+        print('Failed to upload test dataset to blob storage', str(err))
+        sys.exit(1)
+
+def upload_gcp():
+    try:
+        for f in dataset_file:
+            subprocess.run('sudo gsutil -m cp /tmp/{0} gs://{1}/{2}_dataset/'.format(f, args.storage, args.notebook), shell=True, check=True)
+    except Exception as err:
+        print('Failed to upload test dataset to bucket', str(err))
+        sys.exit(1)
+
+if __name__ == "__main__":
+    download_dataset()
+    if args.cloud == 'aws':
+        upload_aws()
+    elif args.cloud == 'azure':
+        os.environ['AZURE_AUTH_LOCATION'] = '/home/datalab-user/keys/azure_auth.json'
+        if args.azure_datalake_account:
+            upload_azure_datalake()
+        else:
+            upload_azure_blob()
+    elif args.cloud == 'gcp':
+        upload_gcp()
+    else:
+        print('Error! Unknown cloud provider.')
+        sys.exit(1)
+
+    sys.exit(0)
diff --git a/integration-tests/examples/ec2_templates/deeplearning/EMR.json b/integration-tests/examples/ec2_templates/deeplearning/EMR.json
new file mode 100644
index 0000000..01d4d02
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/deeplearning/EMR.json
@@ -0,0 +1,10 @@
+{
+  "image": "docker.datalab-dataengine-service",
+  "name": "set the name",
+  "emr_instance_count": "2",
+  "emr_master_instance_type": "c4.xlarge",
+  "emr_slave_instance_type": "c4.xlarge",
+  "emr_version": "emr-5.12.0",
+  "notebook_name": "set notebook name",
+  "template_name": "EMR cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/deeplearning/EMR_spot.json b/integration-tests/examples/ec2_templates/deeplearning/EMR_spot.json
new file mode 100644
index 0000000..5fed763
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/deeplearning/EMR_spot.json
@@ -0,0 +1,12 @@
+{
+  "image": "docker.datalab-dataengine-service",
+  "name": "set the name",
+  "emr_instance_count": "3",
+  "emr_master_instance_type": "c4.large",
+  "emr_slave_instance_type": "c4.large",
+  "emr_slave_instance_spot": true,
+  "emr_slave_instance_spot_pct_price": 40,
+  "emr_version": "emr-5.12.0",
+  "notebook_name": "set notebook name",
+  "template_name": "EMR cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/deeplearning/deeplearning-notebook.json b/integration-tests/examples/ec2_templates/deeplearning/deeplearning-notebook.json
new file mode 100644
index 0000000..1430f00
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/deeplearning/deeplearning-notebook.json
@@ -0,0 +1,7 @@
+{
+  "image": "docker.datalab-deeplearning",
+  "name": "set the name",
+  "shape": "p2.xlarge",
+  "version": "deeplearning-2.2",
+  "template_name": "Deep Learning 2.2"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/deeplearning/spark_cluster.json b/integration-tests/examples/ec2_templates/deeplearning/spark_cluster.json
new file mode 100644
index 0000000..357fe01
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/deeplearning/spark_cluster.json
@@ -0,0 +1,8 @@
+{
+  "image": "docker.datalab-dataengine",
+  "name": "set the name",
+  "dataengine_instance_count": "2",
+  "dataengine_instance_shape": "p2.xlarge",
+  "notebook_name": "set notebook name",
+  "template_name": "Apache Spark cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/jupyter/EMR.json b/integration-tests/examples/ec2_templates/jupyter/EMR.json
new file mode 100644
index 0000000..01d4d02
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/jupyter/EMR.json
@@ -0,0 +1,10 @@
+{
+  "image": "docker.datalab-dataengine-service",
+  "name": "set the name",
+  "emr_instance_count": "2",
+  "emr_master_instance_type": "c4.xlarge",
+  "emr_slave_instance_type": "c4.xlarge",
+  "emr_version": "emr-5.12.0",
+  "notebook_name": "set notebook name",
+  "template_name": "EMR cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/jupyter/EMR_spot.json b/integration-tests/examples/ec2_templates/jupyter/EMR_spot.json
new file mode 100644
index 0000000..5fed763
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/jupyter/EMR_spot.json
@@ -0,0 +1,12 @@
+{
+  "image": "docker.datalab-dataengine-service",
+  "name": "set the name",
+  "emr_instance_count": "3",
+  "emr_master_instance_type": "c4.large",
+  "emr_slave_instance_type": "c4.large",
+  "emr_slave_instance_spot": true,
+  "emr_slave_instance_spot_pct_price": 40,
+  "emr_version": "emr-5.12.0",
+  "notebook_name": "set notebook name",
+  "template_name": "EMR cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/jupyter/jupyter-notebook.json b/integration-tests/examples/ec2_templates/jupyter/jupyter-notebook.json
new file mode 100644
index 0000000..94db781
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/jupyter/jupyter-notebook.json
@@ -0,0 +1,7 @@
+{
+  "image": "docker.datalab-jupyter",
+  "name": "set the name",
+  "shape": "t2.medium",
+  "version": "jupyter_notebook-5.7.4",
+  "template_name": "Jupyter notebook 5.7.4"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/jupyter/spark_cluster.json b/integration-tests/examples/ec2_templates/jupyter/spark_cluster.json
new file mode 100644
index 0000000..2a712e3
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/jupyter/spark_cluster.json
@@ -0,0 +1,8 @@
+{
+  "image": "docker.datalab-dataengine",
+  "name": "set the name",
+  "dataengine_instance_count": "2",
+  "dataengine_instance_shape": "c4.xlarge",
+  "notebook_name": "set notebook name",
+  "template_name": "Apache Spark cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/rstudio/EMR.json b/integration-tests/examples/ec2_templates/rstudio/EMR.json
new file mode 100644
index 0000000..01d4d02
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/rstudio/EMR.json
@@ -0,0 +1,10 @@
+{
+  "image": "docker.datalab-dataengine-service",
+  "name": "set the name",
+  "emr_instance_count": "2",
+  "emr_master_instance_type": "c4.xlarge",
+  "emr_slave_instance_type": "c4.xlarge",
+  "emr_version": "emr-5.12.0",
+  "notebook_name": "set notebook name",
+  "template_name": "EMR cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/rstudio/EMR_spot.json b/integration-tests/examples/ec2_templates/rstudio/EMR_spot.json
new file mode 100644
index 0000000..5fed763
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/rstudio/EMR_spot.json
@@ -0,0 +1,12 @@
+{
+  "image": "docker.datalab-dataengine-service",
+  "name": "set the name",
+  "emr_instance_count": "3",
+  "emr_master_instance_type": "c4.large",
+  "emr_slave_instance_type": "c4.large",
+  "emr_slave_instance_spot": true,
+  "emr_slave_instance_spot_pct_price": 40,
+  "emr_version": "emr-5.12.0",
+  "notebook_name": "set notebook name",
+  "template_name": "EMR cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/rstudio/rstudio-notebook.json b/integration-tests/examples/ec2_templates/rstudio/rstudio-notebook.json
new file mode 100644
index 0000000..366065a
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/rstudio/rstudio-notebook.json
@@ -0,0 +1,7 @@
+{
+  "image": "docker.datalab-rstudio",
+  "name": "set the name",
+  "shape": "t2.medium",
+  "version": "RStudio-1.1.463",
+  "template_name": "RStudio 1.1.463"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/rstudio/spark_cluster.json b/integration-tests/examples/ec2_templates/rstudio/spark_cluster.json
new file mode 100644
index 0000000..2a712e3
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/rstudio/spark_cluster.json
@@ -0,0 +1,8 @@
+{
+  "image": "docker.datalab-dataengine",
+  "name": "set the name",
+  "dataengine_instance_count": "2",
+  "dataengine_instance_shape": "c4.xlarge",
+  "notebook_name": "set notebook name",
+  "template_name": "Apache Spark cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/tensor/EMR.json b/integration-tests/examples/ec2_templates/tensor/EMR.json
new file mode 100644
index 0000000..01d4d02
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/tensor/EMR.json
@@ -0,0 +1,10 @@
+{
+  "image": "docker.datalab-dataengine-service",
+  "name": "set the name",
+  "emr_instance_count": "2",
+  "emr_master_instance_type": "c4.xlarge",
+  "emr_slave_instance_type": "c4.xlarge",
+  "emr_version": "emr-5.12.0",
+  "notebook_name": "set notebook name",
+  "template_name": "EMR cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/tensor/EMR_spot.json b/integration-tests/examples/ec2_templates/tensor/EMR_spot.json
new file mode 100644
index 0000000..5fed763
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/tensor/EMR_spot.json
@@ -0,0 +1,12 @@
+{
+  "image": "docker.datalab-dataengine-service",
+  "name": "set the name",
+  "emr_instance_count": "3",
+  "emr_master_instance_type": "c4.large",
+  "emr_slave_instance_type": "c4.large",
+  "emr_slave_instance_spot": true,
+  "emr_slave_instance_spot_pct_price": 40,
+  "emr_version": "emr-5.12.0",
+  "notebook_name": "set notebook name",
+  "template_name": "EMR cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/tensor/spark_cluster.json b/integration-tests/examples/ec2_templates/tensor/spark_cluster.json
new file mode 100644
index 0000000..357fe01
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/tensor/spark_cluster.json
@@ -0,0 +1,8 @@
+{
+  "image": "docker.datalab-dataengine",
+  "name": "set the name",
+  "dataengine_instance_count": "2",
+  "dataengine_instance_shape": "p2.xlarge",
+  "notebook_name": "set notebook name",
+  "template_name": "Apache Spark cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/tensor/tensor-notebook.json b/integration-tests/examples/ec2_templates/tensor/tensor-notebook.json
new file mode 100644
index 0000000..5746900
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/tensor/tensor-notebook.json
@@ -0,0 +1,7 @@
+{
+  "image": "docker.datalab-tensor",
+  "name": "set the name",
+  "shape": "p2.xlarge",
+  "version": "tensorflow_gpu-1.3.0",
+  "template_name": "TensorFlow 1.3.0"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/zeppelin/EMR.json b/integration-tests/examples/ec2_templates/zeppelin/EMR.json
new file mode 100644
index 0000000..01d4d02
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/zeppelin/EMR.json
@@ -0,0 +1,10 @@
+{
+  "image": "docker.datalab-dataengine-service",
+  "name": "set the name",
+  "emr_instance_count": "2",
+  "emr_master_instance_type": "c4.xlarge",
+  "emr_slave_instance_type": "c4.xlarge",
+  "emr_version": "emr-5.12.0",
+  "notebook_name": "set notebook name",
+  "template_name": "EMR cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/zeppelin/EMR_spot.json b/integration-tests/examples/ec2_templates/zeppelin/EMR_spot.json
new file mode 100644
index 0000000..5fed763
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/zeppelin/EMR_spot.json
@@ -0,0 +1,12 @@
+{
+  "image": "docker.datalab-dataengine-service",
+  "name": "set the name",
+  "emr_instance_count": "3",
+  "emr_master_instance_type": "c4.large",
+  "emr_slave_instance_type": "c4.large",
+  "emr_slave_instance_spot": true,
+  "emr_slave_instance_spot_pct_price": 40,
+  "emr_version": "emr-5.12.0",
+  "notebook_name": "set notebook name",
+  "template_name": "EMR cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/zeppelin/spark_cluster.json b/integration-tests/examples/ec2_templates/zeppelin/spark_cluster.json
new file mode 100644
index 0000000..2a712e3
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/zeppelin/spark_cluster.json
@@ -0,0 +1,8 @@
+{
+  "image": "docker.datalab-dataengine",
+  "name": "set the name",
+  "dataengine_instance_count": "2",
+  "dataengine_instance_shape": "c4.xlarge",
+  "notebook_name": "set notebook name",
+  "template_name": "Apache Spark cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/zeppelin/zeppelin-notebook.json b/integration-tests/examples/ec2_templates/zeppelin/zeppelin-notebook.json
new file mode 100644
index 0000000..e080471
--- /dev/null
+++ b/integration-tests/examples/ec2_templates/zeppelin/zeppelin-notebook.json
@@ -0,0 +1,7 @@
+{
+  "image": "docker.datalab-zeppelin",
+  "name": "set the name",
+  "shape": "t2.medium",
+  "version": "zeppelin-0.9.0",
+  "template_name": "Apache Zeppelin 0.9.0"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/deeplearning/dataproc.json b/integration-tests/examples/gcp_templates/deeplearning/dataproc.json
new file mode 100644
index 0000000..7d80aea
--- /dev/null
+++ b/integration-tests/examples/gcp_templates/deeplearning/dataproc.json
@@ -0,0 +1,12 @@
+{
+  "image": "docker.datalab-dataengine-service",
+  "name": "set the name",
+  "dataproc_master_count": "1",
+  "dataproc_slave_count": "2",
+  "dataproc_preemptible_count": "0",
+  "dataproc_master_instance_type": "n1-standard-2",
+  "dataproc_slave_instance_type": "n1-standard-2",
+  "dataproc_version": "1.2",
+  "notebook_name": "set notebook name",
+  "template_name": "Dataproc cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/deeplearning/deeplearning-notebook.json b/integration-tests/examples/gcp_templates/deeplearning/deeplearning-notebook.json
new file mode 100644
index 0000000..98baf0f
--- /dev/null
+++ b/integration-tests/examples/gcp_templates/deeplearning/deeplearning-notebook.json
@@ -0,0 +1,7 @@
+{
+  "image": "docker.datalab-deeplearning",
+  "name": "set the name",
+  "shape": "n1-highcpu-8",
+  "version": "deeplearning-1.9",
+  "template_name": "Deep Learning 1.9"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/deeplearning/spark_cluster.json b/integration-tests/examples/gcp_templates/deeplearning/spark_cluster.json
new file mode 100644
index 0000000..96e78d1
--- /dev/null
+++ b/integration-tests/examples/gcp_templates/deeplearning/spark_cluster.json
@@ -0,0 +1,8 @@
+{
+  "image": "docker.datalab-dataengine",
+  "name": "set the name",
+  "dataengine_instance_count": "2",
+  "dataengine_instance_shape": "n1-standard-2",
+  "notebook_name": "set notebook name",
+  "template_name": "Apache Spark cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/jupyter/dataproc.json b/integration-tests/examples/gcp_templates/jupyter/dataproc.json
new file mode 100644
index 0000000..7d80aea
--- /dev/null
+++ b/integration-tests/examples/gcp_templates/jupyter/dataproc.json
@@ -0,0 +1,12 @@
+{
+  "image": "docker.datalab-dataengine-service",
+  "name": "set the name",
+  "dataproc_master_count": "1",
+  "dataproc_slave_count": "2",
+  "dataproc_preemptible_count": "0",
+  "dataproc_master_instance_type": "n1-standard-2",
+  "dataproc_slave_instance_type": "n1-standard-2",
+  "dataproc_version": "1.2",
+  "notebook_name": "set notebook name",
+  "template_name": "Dataproc cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/jupyter/jupyter-notebook.json b/integration-tests/examples/gcp_templates/jupyter/jupyter-notebook.json
new file mode 100644
index 0000000..ba385c7
--- /dev/null
+++ b/integration-tests/examples/gcp_templates/jupyter/jupyter-notebook.json
@@ -0,0 +1,7 @@
+{
+  "image": "docker.datalab-jupyter",
+  "name": "set the name",
+  "shape": "n1-standard-2",
+  "version": "jupyter_notebook-5.7.4",
+  "template_name": "Jupyter notebook 5.7.4"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/jupyter/spark_cluster.json b/integration-tests/examples/gcp_templates/jupyter/spark_cluster.json
new file mode 100644
index 0000000..96e78d1
--- /dev/null
+++ b/integration-tests/examples/gcp_templates/jupyter/spark_cluster.json
@@ -0,0 +1,8 @@
+{
+  "image": "docker.datalab-dataengine",
+  "name": "set the name",
+  "dataengine_instance_count": "2",
+  "dataengine_instance_shape": "n1-standard-2",
+  "notebook_name": "set notebook name",
+  "template_name": "Apache Spark cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/rstudio/dataproc.json b/integration-tests/examples/gcp_templates/rstudio/dataproc.json
new file mode 100644
index 0000000..7d80aea
--- /dev/null
+++ b/integration-tests/examples/gcp_templates/rstudio/dataproc.json
@@ -0,0 +1,12 @@
+{
+  "image": "docker.datalab-dataengine-service",
+  "name": "set the name",
+  "dataproc_master_count": "1",
+  "dataproc_slave_count": "2",
+  "dataproc_preemptible_count": "0",
+  "dataproc_master_instance_type": "n1-standard-2",
+  "dataproc_slave_instance_type": "n1-standard-2",
+  "dataproc_version": "1.2",
+  "notebook_name": "set notebook name",
+  "template_name": "Dataproc cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/rstudio/rstudio-notebook.json b/integration-tests/examples/gcp_templates/rstudio/rstudio-notebook.json
new file mode 100644
index 0000000..3e1187d
--- /dev/null
+++ b/integration-tests/examples/gcp_templates/rstudio/rstudio-notebook.json
@@ -0,0 +1,7 @@
+{
+  "image": "docker.datalab-rstudio",
+  "name": "set the name",
+  "shape": "n1-standard-2",
+  "version": "RStudio-1.1.463",
+  "template_name": "RStudio 1.1.463"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/rstudio/spark_cluster.json b/integration-tests/examples/gcp_templates/rstudio/spark_cluster.json
new file mode 100644
index 0000000..96e78d1
--- /dev/null
+++ b/integration-tests/examples/gcp_templates/rstudio/spark_cluster.json
@@ -0,0 +1,8 @@
+{
+  "image": "docker.datalab-dataengine",
+  "name": "set the name",
+  "dataengine_instance_count": "2",
+  "dataengine_instance_shape": "n1-standard-2",
+  "notebook_name": "set notebook name",
+  "template_name": "Apache Spark cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/tensor/dataproc.json b/integration-tests/examples/gcp_templates/tensor/dataproc.json
new file mode 100644
index 0000000..7d80aea
--- /dev/null
+++ b/integration-tests/examples/gcp_templates/tensor/dataproc.json
@@ -0,0 +1,12 @@
+{
+  "image": "docker.datalab-dataengine-service",
+  "name": "set the name",
+  "dataproc_master_count": "1",
+  "dataproc_slave_count": "2",
+  "dataproc_preemptible_count": "0",
+  "dataproc_master_instance_type": "n1-standard-2",
+  "dataproc_slave_instance_type": "n1-standard-2",
+  "dataproc_version": "1.2",
+  "notebook_name": "set notebook name",
+  "template_name": "Dataproc cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/tensor/spark_cluster.json b/integration-tests/examples/gcp_templates/tensor/spark_cluster.json
new file mode 100644
index 0000000..96e78d1
--- /dev/null
+++ b/integration-tests/examples/gcp_templates/tensor/spark_cluster.json
@@ -0,0 +1,8 @@
+{
+  "image": "docker.datalab-dataengine",
+  "name": "set the name",
+  "dataengine_instance_count": "2",
+  "dataengine_instance_shape": "n1-standard-2",
+  "notebook_name": "set notebook name",
+  "template_name": "Apache Spark cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/tensor/tensor-notebook.json b/integration-tests/examples/gcp_templates/tensor/tensor-notebook.json
new file mode 100644
index 0000000..3dca522
--- /dev/null
+++ b/integration-tests/examples/gcp_templates/tensor/tensor-notebook.json
@@ -0,0 +1,7 @@
+{
+  "image": "docker.datalab-tensor",
+  "name": "set the name",
+  "shape": "n1-highcpu-8",
+  "version": "tensorflow_gpu-1.4.0",
+  "template_name": "TensorFlow 1.4.0"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/zeppelin/dataproc.json b/integration-tests/examples/gcp_templates/zeppelin/dataproc.json
new file mode 100644
index 0000000..7d80aea
--- /dev/null
+++ b/integration-tests/examples/gcp_templates/zeppelin/dataproc.json
@@ -0,0 +1,12 @@
+{
+  "image": "docker.datalab-dataengine-service",
+  "name": "set the name",
+  "dataproc_master_count": "1",
+  "dataproc_slave_count": "2",
+  "dataproc_preemptible_count": "0",
+  "dataproc_master_instance_type": "n1-standard-2",
+  "dataproc_slave_instance_type": "n1-standard-2",
+  "dataproc_version": "1.2",
+  "notebook_name": "set notebook name",
+  "template_name": "Dataproc cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/zeppelin/spark_cluster.json b/integration-tests/examples/gcp_templates/zeppelin/spark_cluster.json
new file mode 100644
index 0000000..96e78d1
--- /dev/null
+++ b/integration-tests/examples/gcp_templates/zeppelin/spark_cluster.json
@@ -0,0 +1,8 @@
+{
+  "image": "docker.datalab-dataengine",
+  "name": "set the name",
+  "dataengine_instance_count": "2",
+  "dataengine_instance_shape": "n1-standard-2",
+  "notebook_name": "set notebook name",
+  "template_name": "Apache Spark cluster"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/zeppelin/zeppelin-notebook.json b/integration-tests/examples/gcp_templates/zeppelin/zeppelin-notebook.json
new file mode 100644
index 0000000..6680874
--- /dev/null
+++ b/integration-tests/examples/gcp_templates/zeppelin/zeppelin-notebook.json
@@ -0,0 +1,7 @@
+{
+  "image": "docker.datalab-zeppelin",
+  "name": "set the name",
+  "shape": "n1-standard-2",
+  "version": "zeppelin-0.9.0",
+  "template_name": "Apache Zeppelin 0.9.0"
+}
\ No newline at end of file
diff --git a/integration-tests/examples/scenario_deeplearning/deeplearning_tests.py b/integration-tests/examples/scenario_deeplearning/deeplearning_tests.py
new file mode 100644
index 0000000..540bede
--- /dev/null
+++ b/integration-tests/examples/scenario_deeplearning/deeplearning_tests.py
@@ -0,0 +1,141 @@
+#!/usr/bin/python3
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+import os, sys, json
+from fabric import *
+import argparse
+import subprocess
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--storage', type=str, default='')
+parser.add_argument('--cloud', type=str, default='')
+parser.add_argument('--os_user', type=str, default='')
+parser.add_argument('--cluster_name', type=str, default='')
+parser.add_argument('--azure_storage_account', type=str, default='')
+parser.add_argument('--azure_datalake_account', type=str, default='')
+args = parser.parse_args()
+
+
+def prepare_templates():
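+    # Fetch the dogs-vs-cats sample dataset and lay it out as test/ and train/ directories in the user's home.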
+    try:
+        subprocess.run('/bin/bash -c "source /etc/profile && wget http://files.fast.ai/data/dogscats.zip -O /tmp/dogscats.zip"', shell=True, check=True)
+        subprocess.run('unzip -q /tmp/dogscats.zip -d /tmp', shell=True, check=True)
+        subprocess.run('/bin/bash -c "mkdir -p /home/{0}/{1}"'.format(args.os_user, "{test,train}"), shell=True, check=True)
+        subprocess.run('mv /tmp/dogscats/test1/* /home/{0}/test'.format(args.os_user), shell=True, check=True)
+        subprocess.run('/bin/bash -c "mv /tmp/dogscats/valid/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user), shell=True, check=True)
+        subprocess.run('/bin/bash -c "mv /tmp/dogscats/train/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user), shell=True, check=True)
+    except Exception as err:
+        print('Failed to download/unpack image dataset!', str(err))
+        sys.exit(1)
+    subprocess.run('mkdir -p /home/{0}/logs'.format(args.os_user), shell=True, check=True)
+    subprocess.run('mv /tmp/deeplearning /home/{0}/test_templates'.format(args.os_user), shell=True, check=True)
+
+def get_storage():
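+    # Return a (working storage, protocol) pair for the current cloud; a Data Lake account switches Azure from wasbs to adl.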
+    storages = {"aws": args.storage,
+                "azure": "{0}@{1}.blob.core.windows.net".format(args.storage, args.azure_storage_account),
+                "gcp": args.storage}
+    protocols = {"aws": "s3a", "azure": "wasbs", "gcp": "gs"}
+    if args.azure_datalake_account:
+        storages['azure'] = "{0}.azuredatalakestore.net/{1}".format(args.azure_datalake_account, args.storage)
+        protocols['azure'] = 'adl'
+    return (storages[args.cloud], protocols[args.cloud])
+
+def prepare_ipynb(kernel_name, template_path, ipynb_name):
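+    # Render a runnable notebook from the template by substituting the kernel name.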
+    with open(template_path, 'r') as f:
+        text = f.read()
+    text = text.replace('KERNEL_NAME', kernel_name)
+    with open('/home/{}/{}.ipynb'.format(args.os_user, ipynb_name), 'w') as f:
+        f.write(text)
+
+def run_ipynb(ipynb_name):
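+    # Execute the rendered notebook headlessly with "jupyter nbconvert --execute", exposing CUDA, cuDNN and OpenMPI libraries via LD_LIBRARY_PATH.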
+    subprocess.run('''bash -l -c 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64:''' \
+    '''/usr/lib64/openmpi/lib; jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.startup_timeout=300 --execute /home/{}/{}.ipynb' '''.format(args.os_user, ipynb_name), shell=True, check=True)
+
+def run_tensor():
+    interpreters = ['pyspark_local']
+    for i in interpreters:
+        prepare_ipynb(i, '/home/{}/test_templates/template_preparation_tensor.ipynb'.format(args.os_user), 'preparation_tensor')
+        run_ipynb('preparation_tensor')
+        prepare_ipynb(i, '/home/{}/test_templates/template_visualization_tensor.ipynb'.format(args.os_user), 'visualization_tensor')
+        run_ipynb('visualization_tensor')
+
+def run_caffe():
+    interpreters = ['pyspark_local']
+    for i in interpreters:
+        prepare_ipynb(i, '/home/{}/test_templates/template_caffe.ipynb'.format(args.os_user), 'test_caffe')
+        run_ipynb('test_caffe')
+
+def run_caffe2():
+    interpreters = ['pyspark_local']
+    for i in interpreters:
+        prepare_ipynb(i, '/home/{}/test_templates/template_caffe2.ipynb'.format(args.os_user), 'test_caffe2')
+        run_ipynb('test_caffe2')
+
+def run_cntk():
+    interpreters = ['pyspark_local']
+    for i in interpreters:
+        prepare_ipynb(i, '/home/{}/test_templates/template_cntk.ipynb'.format(args.os_user), 'test_cntk')
+        run_ipynb('test_cntk')
+
+def run_keras():
+    interpreters = ['pyspark_local']
+    for i in interpreters:
+        prepare_ipynb(i, '/home/{}/test_templates/template_keras.ipynb'.format(args.os_user), 'test_keras')
+        run_ipynb('test_keras')
+
+def run_mxnet():
+    interpreters = ['pyspark_local']
+    for i in interpreters:
+        prepare_ipynb(i, '/home/{}/test_templates/template_mxnet.ipynb'.format(args.os_user), 'test_mxnet')
+        run_ipynb('test_mxnet')
+
+def run_theano():
+    interpreters = ['pyspark_local']
+    for i in interpreters:
+        prepare_ipynb(i, '/home/{}/test_templates/template_theano.ipynb'.format(args.os_user), 'test_theano')
+        run_ipynb('test_theano')
+
+#def run_torch():
+#    interpreters = ['itorch']
+#    for i in interpreters:
+#        prepare_ipynb(i, '/home/{}/test_templates/template_torch.ipynb'.format(args.os_user), 'test_torch')
+#        run_ipynb('test_torch')
+
+
+if __name__ == "__main__":
+    try:
+        prepare_templates()
+        run_tensor()
+        run_caffe()
+        run_caffe2()
+        run_cntk()
+        run_keras()
+        run_mxnet()
+        run_theano()
+        #run_torch()
+    except Exception as err:
+        print('Error!', str(err))
+        sys.exit(1)
+
+    sys.exit(0)
\ No newline at end of file
diff --git a/integration-tests/examples/scenario_jupyter/jupyter_tests.py b/integration-tests/examples/scenario_jupyter/jupyter_tests.py
new file mode 100644
index 0000000..85aec6b
--- /dev/null
+++ b/integration-tests/examples/scenario_jupyter/jupyter_tests.py
@@ -0,0 +1,102 @@
+#!/usr/bin/python3
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+import os, sys, json
+from fabric import *
+import argparse
+import subprocess
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--storage', type=str, default='')
+parser.add_argument('--cloud', type=str, default='')
+parser.add_argument('--os_user', type=str, default='')
+parser.add_argument('--cluster_name', type=str, default='')
+parser.add_argument('--azure_storage_account', type=str, default='')
+parser.add_argument('--azure_datalake_account', type=str, default='')
+args = parser.parse_args()
+
+
+def prepare_templates():
+    subprocess.run('mv /tmp/jupyter /home/{0}/test_templates'.format(args.os_user), shell=True, check=True)
+
+def get_storage():
+    storages = {"aws": args.storage,
+                "azure": "{0}@{1}.blob.core.windows.net".format(args.storage, args.azure_storage_account),
+                "gcp": args.storage}
+    protocols = {"aws": "s3a", "azure": "wasbs", "gcp": "gs"}
+    if args.azure_datalake_account:
+        storages['azure'] = "{0}.azuredatalakestore.net/{1}".format(args.azure_datalake_account, args.storage)
+        protocols['azure'] = 'adl'
+    return (storages[args.cloud], protocols[args.cloud])
+
+def prepare_ipynb(kernel_name, template_path, ipynb_name):
+    with open(template_path, 'r') as f:
+        text = f.read()
+    text = text.replace('WORKING_STORAGE', get_storage()[0])
+    text = text.replace('PROTOCOL_NAME', get_storage()[1])
+    text = text.replace('KERNEL_NAME', kernel_name)
+    with open('/home/{}/{}.ipynb'.format(args.os_user, ipynb_name), 'w') as f:
+        f.write(text)
+
+def run_ipynb(ipynb_name):
+    subprocess.run('jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.startup_timeout=300 --execute /home/{}/{}.ipynb'.format(args.os_user, ipynb_name), shell=True, check=True)
+
+def run_pyspark():
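+    # Run the preparation and visualization notebooks on both the local kernel and the remote cluster kernel.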
+    interpreters = ['pyspark_local', 'pyspark_' + args.cluster_name]
+    for i in interpreters:
+        prepare_ipynb(i, '/home/{}/test_templates/template_preparation_pyspark.ipynb'.format(args.os_user),
+                      'preparation_pyspark')
+        run_ipynb('preparation_pyspark')
+        prepare_ipynb(i, '/home/{}/test_templates/template_visualization_pyspark.ipynb'.format(args.os_user),
+                      'visualization_pyspark')
+        run_ipynb('visualization_pyspark')
+
+def run_spark():
+    interpreters = ['apache_toree_scala', 'toree_' + args.cluster_name]
+    for i in interpreters:
+        prepare_ipynb(i, '/home/{}/test_templates/template_preparation_spark.ipynb'.format(args.os_user),
+                      'preparation_spark')
+        run_ipynb('preparation_spark')
+
+def run_sparkr():
+    interpreters = ['ir', 'r_' + args.cluster_name]
+    for i in interpreters:
+        prepare_ipynb(i, '/home/{}/test_templates/template_preparation_sparkr.ipynb'.format(args.os_user),
+                      'preparation_sparkr')
+        run_ipynb('preparation_sparkr')
+        prepare_ipynb(i, '/home/{}/test_templates/template_visualization_sparkr.ipynb'.format(args.os_user),
+                      'visualization_sparkr')
+        run_ipynb('visualization_sparkr')
+
+
+if __name__ == "__main__":
+    try:
+        prepare_templates()
+        run_pyspark()
+        run_spark()
+        run_sparkr()
+    except Exception as err:
+        print('Error!', str(err))
+        sys.exit(1)
+
+    sys.exit(0)
\ No newline at end of file
diff --git a/integration-tests/examples/scenario_rstudio/rstudio_tests.py b/integration-tests/examples/scenario_rstudio/rstudio_tests.py
new file mode 100644
index 0000000..6193b85
--- /dev/null
+++ b/integration-tests/examples/scenario_rstudio/rstudio_tests.py
@@ -0,0 +1,95 @@
+#!/usr/bin/python3
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+import os, sys, json
+from fabric import *
+import argparse
+import subprocess
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--storage', type=str, default='')
+parser.add_argument('--cloud', type=str, default='')
+parser.add_argument('--os_user', type=str, default='')
+parser.add_argument('--cluster_name', type=str, default='')
+parser.add_argument('--azure_storage_account', type=str, default='')
+parser.add_argument('--azure_datalake_account', type=str, default='')
+args = parser.parse_args()
+
+
+def prepare_templates():
+    subprocess.run('mv /tmp/rstudio /home/{0}/test_templates'.format(args.os_user), shell=True, check=True)
+
+def get_storage():
+    storages = {"aws": args.storage,
+                "azure": "{0}@{1}.blob.core.windows.net".format(args.storage, args.azure_storage_account),
+                "gcp": args.storage}
+    protocols = {"aws": "s3a", "azure": "wasbs", "gcp": "gs"}
+    if args.azure_datalake_account:
+        storages['azure'] = "{0}.azuredatalakestore.net/{1}".format(args.azure_datalake_account, args.storage)
+        protocols['azure'] = 'adl'
+    return (storages[args.cloud], protocols[args.cloud])
+
+def prepare_rscript(template_path, rscript_name, kernel='remote'):
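+    # Render an R script from the template, selecting the Spark master: standalone for "-de-" clusters, YARN for "-des-" clusters, local[*] for the local kernel.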
+    with open(template_path, 'r') as f:
+        text = f.read()
+    text = text.replace('WORKING_STORAGE', get_storage()[0])
+    text = text.replace('PROTOCOL_NAME', get_storage()[1])
+    if kernel == 'remote':
+        if '-de-' in args.cluster_name:
+            text = text.replace('MASTER', 'master')
+        elif '-des-' in args.cluster_name:
+            text = text.replace('MASTER', 'master = "yarn"')
+    elif kernel == 'local':
+        text = text.replace('MASTER', 'master = "local[*]"')
+    with open('/home/{}/{}.r'.format(args.os_user, rscript_name), 'w') as f:
+        f.write(text)
+
+def enable_local_kernel():
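+    # Switch R to the local Spark installation: disable the remote master in .Rprofile, keep only the /opt/spark/ entries in .Renviron and drop stale Derby metastore files.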
+    subprocess.run("sed -i 's/^master/#master/' /home/{0}/.Rprofile".format(args.os_user), shell=True, check=True)
+    subprocess.run('''sed -i "s/^/#/g" /home/{0}/.Renviron'''.format(args.os_user), shell=True, check=True)
+    subprocess.run('''sed -i "/\/opt\/spark\//s/#//g" /home/{0}/.Renviron'''.format(args.os_user), shell=True, check=True)
+    subprocess.run('rm -f metastore_db/db* derby.log', shell=True, check=True)
+
+def run_rscript(rscript_name):
+    subprocess.run('R < /home/{0}/{1}.r --no-save'.format(args.os_user, rscript_name), shell=True, check=True)
+
+
+if __name__ == "__main__":
+    try:
+        prepare_templates()
+        # Running on remote kernel
+        prepare_rscript('/home/{}/test_templates/template_preparation.r'.format(args.os_user), 'preparation', 'remote')
+        run_rscript('preparation')
+        prepare_rscript('/home/{}/test_templates/template_visualization.r'.format(args.os_user), 'visualization', 'remote')
+        run_rscript('visualization')
+        # Running on local kernel
+        enable_local_kernel()
+        prepare_rscript('/home/{}/test_templates/template_preparation.r'.format(args.os_user), 'preparation', 'local')
+        prepare_rscript('/home/{}/test_templates/template_visualization.r'.format(args.os_user), 'visualization', 'local')
+        run_rscript('preparation')
+        run_rscript('visualization')
+    except Exception as err:
+        print('Error!', str(err))
+        sys.exit(1)
+
+    sys.exit(0)
diff --git a/integration-tests/examples/scenario_tensor/tensor_tests.py b/integration-tests/examples/scenario_tensor/tensor_tests.py
new file mode 100644
index 0000000..72487dc
--- /dev/null
+++ b/integration-tests/examples/scenario_tensor/tensor_tests.py
@@ -0,0 +1,91 @@
+#!/usr/bin/python3
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+#   http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+import os, sys, json
+from fabric import *
+import argparse
+import subprocess
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--storage', type=str, default='')
+parser.add_argument('--cloud', type=str, default='')
+parser.add_argument('--os_user', type=str, default='')
+parser.add_argument('--cluster_name', type=str, default='')
+parser.add_argument('--azure_storage_account', type=str, default='')
+parser.add_argument('--azure_datalake_account', type=str, default='')
+args = parser.parse_args()
+
+
+def prepare_templates():
+    try:
+        subprocess.run('/bin/bash -c "source /etc/profile && wget http://files.fast.ai/data/dogscats.zip -O /tmp/dogscats.zip"', shell=True, check=True)
+        subprocess.run('unzip -q /tmp/dogscats.zip -d /tmp', shell=True, check=True)
+        subprocess.run('/bin/bash -c "mkdir -p /home/{0}/{1}"'.format(args.os_user, "{test,train}"), shell=True, check=True)
+        subprocess.run('mv /tmp/dogscats/test1/* /home/{0}/test'.format(args.os_user), shell=True, check=True)
+        subprocess.run('/bin/bash -c "mv /tmp/dogscats/valid/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user), shell=True, check=True)
+        subprocess.run('/bin/bash -c "mv /tmp/dogscats/train/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user), shell=True, check=True)
+    except Exception as err:
+        print('Failed to download/unpack image dataset!', str(err))
+        sys.exit(1)
+    subprocess.run('mkdir -p /home/{0}/logs'.format(args.os_user), shell=True, check=True)
+    subprocess.run('mv /tmp/tensor /home/{0}/test_templates'.format(args.os_user), shell=True, check=True)
+
+def get_storage():
+    storages = {"aws": args.storage,
+                "azure": "{0}@{1}.blob.core.windows.net".format(args.storage, args.azure_storage_account),
+                "gcp": args.storage}
+    protocols = {"aws": "s3a", "azure": "wasbs", "gcp": "gs"}
+    if args.azure_datalake_account:
+        storages['azure'] = "{0}.azuredatalakestore.net/{1}".format(args.azure_datalake_account, args.storage)
+        protocols['azure'] = 'adl'
+    return (storages[args.cloud], protocols[args.cloud])
+
+def prepare_ipynb(kernel_name, template_path, ipynb_name):
+    with open(template_path, 'r') as f:
+        text = f.read()
+    text = text.replace('KERNEL_NAME', kernel_name)
+    with open('/home/{}/{}.ipynb'.format(args.os_user, ipynb_name), 'w') as f:
+        f.write(text)
+
+def run_ipynb(ipynb_name):
+    subprocess.run('''bash -l -c 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64; ''' \
+            '''jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.startup_timeout=300 --execute /home/{}/{}.ipynb' '''.format(args.os_user, ipynb_name), shell=True, check=True)
+
+def run_tensor():
+    interpreters = ['pyspark_local']
+    for i in interpreters:
+        prepare_ipynb(i, '/home/{}/test_templates/template_preparation_tensor.ipynb'.format(args.os_user), 'preparation_tensor')
+        run_ipynb('preparation_tensor')
+        prepare_ipynb(i, '/home/{}/test_templates/template_visualization_tensor.ipynb'.format(args.os_user), 'visualization_tensor')
+        run_ipynb('visualization_tensor')
+
+
+if __name__ == "__main__":
+    try:
+        prepare_templates()
+        run_tensor()
+    except Exception as err:
+        print('Error!', str(err))
+        sys.exit(1)
+
+    sys.exit(0)
\ No newline at end of file
diff --git a/integration-tests/examples/scenario_zeppelin/zeppelin_tests.py b/integration-tests/examples/scenario_zeppelin/zeppelin_tests.py
new file mode 100644
index 0000000..b3a3215
--- /dev/null
+++ b/integration-tests/examples/scenario_zeppelin/zeppelin_tests.py
@@ -0,0 +1,180 @@
+#!/usr/bin/python3
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+#   http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+import os, sys, json
+from fabric import *
+import argparse
+import requests
+import subprocess
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--storage', type=str, default='')
+parser.add_argument('--cloud', type=str, default='')
+parser.add_argument('--os_user', type=str, default='')
+parser.add_argument('--cluster_name', type=str, default='')
+parser.add_argument('--azure_storage_account', type=str, default='')
+parser.add_argument('--azure_datalake_account', type=str, default='')
+args = parser.parse_args()
+
+
+def prepare_templates():
+    subprocess.run('mv /tmp/zeppelin /home/{0}/test_templates'.format(args.os_user), shell=True, check=True)
+
+def get_storage():
+    storages = {"aws": args.storage,
+                "azure": "{0}@{1}.blob.core.windows.net".format(args.storage, args.azure_storage_account),
+                "gcp": args.storage}
+    protocols = {"aws": "s3a", "azure": "wasbs", "gcp": "gs"}
+    if args.azure_datalake_account:
+        storages['azure'] = "{0}.azuredatalakestore.net/{1}".format(args.azure_datalake_account, args.storage)
+        protocols['azure'] = 'adl'
+    return (storages[args.cloud], protocols[args.cloud])
+
+def get_note_status(note_id, notebook_ip):
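+    # Poll the Zeppelin job API every 5 seconds until no paragraph is RUNNING or PENDING; abort on ERROR.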
+    running = False
+    subprocess.run('sleep 5', shell=True, check=True)
+    response = requests.get('http://{0}:8080/api/notebook/job/{1}'.format(notebook_ip, note_id))
+    status = json.loads(response.content)
+    for i in status.get('body'):
+        if i.get('status') == "RUNNING" or i.get('status') == "PENDING":
+            print('Notebook status: {}'.format(i.get('status')))
+            running = True
+        elif i.get('status') == "ERROR":
+            print('Error in notebook')
+            sys.exit(1)
+    if running:
+        subprocess.run('sleep 5', shell=True, check=True)
+        return get_note_status(note_id, notebook_ip)
+    else:
+        return "OK"
+
+def import_note(note_path, notebook_ip):
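+    # Import the note JSON through the Zeppelin REST API and return the id of the created note.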
+    headers = {'Accept': 'application/json', 'Content-Type': 'application/json', 'Expires': '0'}
+    response = requests.post('http://{0}:8080/api/notebook/import'.format(notebook_ip), data=open(note_path, 'rb'), headers=headers)
+    status = json.loads(response.content)
+    if status.get('status') == 'OK':
+        print('Imported notebook: {}'.format(note_path))
+        return status.get('body')
+    else:
+        print('Failed to import notebook')
+        sys.exit(1)
+
+def prepare_note(interpreter_name, template_path, note_name):
+    with open(template_path, 'r') as f:
+        text = f.read()
+    text = text.replace('INTERPRETER_NAME', interpreter_name)
+    text = text.replace('WORKING_STORAGE', get_storage()[0])
+    text = text.replace('PROTOCOL_NAME', get_storage()[1])
+    with open(note_name, 'w') as f:
+        f.write(text)
+
+def run_note(note_id, notebook_ip):
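+    # Trigger execution of every paragraph in the note, then block until the run finishes.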
+    response = requests.post('http://{0}:8080/api/notebook/job/{1}'.format(notebook_ip, note_id))
+    status = json.loads(response.content)
+    if status.get('status') == 'OK':
+        get_note_status(note_id, notebook_ip)
+    else:
+        print('Failed to run notebook')
+        sys.exit(1)
+
+def remove_note(note_id, notebook_ip):
+    response = requests.delete('http://{0}:8080/api/notebook/{1}'.format(notebook_ip, note_id))
+    status = json.loads(response.content)
+    if status.get('status') == 'OK':
+        return "OK"
+    else:
+        sys.exit(1)
+
+def restart_interpreter(notebook_ip, interpreter):
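+    # Look up the interpreter setting id by name and restart it so the next scenario starts from a clean interpreter state.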
+    response = requests.get('http://{0}:8080/api/interpreter/setting'.format(notebook_ip))
+    status = json.loads(response.content)
+    if status.get('status') == 'OK':
+        interpreter_id = [i['id'] for i in status['body'] if i['name'] in interpreter][0]
+        response = requests.put('http://{0}:8080/api/interpreter/setting/restart/{1}'.format(notebook_ip, interpreter_id))
+        status = json.loads(response.content)
+        if status.get('status') == 'OK':
+            subprocess.run('sleep 5', shell=True, check=True)
+            return "OK"
+        else:
+            print('Failed to restart interpreter')
+            sys.exit(1)
+    else:
+        print('Failed to get interpreter settings')
+        sys.exit(1)
+
+def run_pyspark():
+    interpreters = ['local_interpreter_python2.pyspark', args.cluster_name + "_py2.pyspark"]
+    for i in interpreters:
+        prepare_note(i, '/home/{}/test_templates/template_preparation_pyspark.json'.format(args.os_user),
+                     '/home/{}/preparation_pyspark.json'.format(args.os_user))
+        note_id = import_note('/home/{}/preparation_pyspark.json'.format(args.os_user), notebook_ip)
+        run_note(note_id, notebook_ip)
+        remove_note(note_id, notebook_ip)
+        prepare_note(i, '/home/{}/test_templates/template_visualization_pyspark.json'.format(args.os_user),
+                     '/home/{}/visualization_pyspark.json'.format(args.os_user))
+        note_id = import_note('/home/{}/visualization_pyspark.json'.format(args.os_user), notebook_ip)
+        run_note(note_id, notebook_ip)
+        remove_note(note_id, notebook_ip)
+        restart_interpreter(notebook_ip, i)
+
+def run_sparkr():
+    if os.path.exists('/opt/livy/'):
+        interpreters = ['local_interpreter_python2.sparkr', args.cluster_name + "_py2.sparkr"]
+    else:
+        interpreters = ['local_interpreter_python2.r', args.cluster_name + "_py2.r"]
+    for i in interpreters:
+        prepare_note(i, '/home/{}/test_templates/template_preparation_sparkr.json'.format(args.os_user),
+                     '/home/{}/preparation_sparkr.json'.format(args.os_user))
+        note_id = import_note('/home/{}/preparation_sparkr.json'.format(args.os_user), notebook_ip)
+        run_note(note_id, notebook_ip)
+        remove_note(note_id, notebook_ip)
+        prepare_note(i, '/home/{}/test_templates/template_visualization_sparkr.json'.format(args.os_user),
+                     '/home/{}/visualization_sparkr.json'.format(args.os_user))
+        note_id = import_note('/home/{}/visualization_sparkr.json'.format(args.os_user), notebook_ip)
+        run_note(note_id, notebook_ip)
+        remove_note(note_id, notebook_ip)
+        restart_interpreter(notebook_ip, i)
+
+def run_spark():
+    interpreters = ['local_interpreter_python2.spark', args.cluster_name + "_py2.spark"]
+    for i in interpreters:
+        prepare_note(i, '/home/{}/test_templates/template_preparation_spark.json'.format(args.os_user),
+                     '/home/{}/preparation_spark.json'.format(args.os_user))
+        note_id = import_note('/home/{}/preparation_spark.json'.format(args.os_user), notebook_ip)
+        run_note(note_id, notebook_ip)
+        remove_note(note_id, notebook_ip)
+        restart_interpreter(notebook_ip, i)
+
+
+if __name__ == "__main__":
+    try:
+        notebook_ip = subprocess.run('hostname -I', capture_output=True, shell=True, check=True).stdout.decode('UTF-8').split()[0]
+        prepare_templates()
+        run_pyspark()
+        run_sparkr()
+        run_spark()
+    except Exception as err:
+        print('Error!', str(err))
+        sys.exit(1)
+
+    sys.exit(0)
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/deeplearning/lib_groups.json b/integration-tests/examples/test_libs/deeplearning/lib_groups.json
new file mode 100644
index 0000000..b77b8c2
--- /dev/null
+++ b/integration-tests/examples/test_libs/deeplearning/lib_groups.json
@@ -0,0 +1,5 @@
+[
+  "pip3",
+  "others",
+  "os_pkg"
+]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/deeplearning/lib_list.json b/integration-tests/examples/test_libs/deeplearning/lib_list.json
new file mode 100644
index 0000000..d360bd8
--- /dev/null
+++ b/integration-tests/examples/test_libs/deeplearning/lib_list.json
@@ -0,0 +1,14 @@
+[
+  {
+    "group": "os_pkg",
+    "start_with": "py"
+  },
+  {
+    "group": "others",
+    "start_with": "gh"
+  },
+  {
+    "group": "pip3",
+    "start_with": "sp"
+  }
+]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/jupyter/lib_groups.json b/integration-tests/examples/test_libs/jupyter/lib_groups.json
new file mode 100644
index 0000000..87e47a5
--- /dev/null
+++ b/integration-tests/examples/test_libs/jupyter/lib_groups.json
@@ -0,0 +1,6 @@
+[
+  "pip3",
+  "others",
+  "os_pkg",
+  "r_pkg"
+]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/jupyter/lib_list.json b/integration-tests/examples/test_libs/jupyter/lib_list.json
new file mode 100644
index 0000000..5590429
--- /dev/null
+++ b/integration-tests/examples/test_libs/jupyter/lib_list.json
@@ -0,0 +1,18 @@
+[
+  {
+    "group": "os_pkg",
+    "start_with": "py"
+  },
+  {
+    "group": "others",
+    "start_with": "gh"
+  },
+  {
+    "group": "r_pkg",
+    "start_with": "sp"
+  },
+  {
+    "group": "pip3",
+    "start_with": "sp"
+  }
+]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/lib_groups.json b/integration-tests/examples/test_libs/lib_groups.json
new file mode 100644
index 0000000..5cd7bfe
--- /dev/null
+++ b/integration-tests/examples/test_libs/lib_groups.json
@@ -0,0 +1,5 @@
+[
+  "pip3",
+  "os_pkg",
+  "r_pkg"
+]
diff --git a/integration-tests/examples/test_libs/lib_list.json b/integration-tests/examples/test_libs/lib_list.json
new file mode 100644
index 0000000..18385fc
--- /dev/null
+++ b/integration-tests/examples/test_libs/lib_list.json
@@ -0,0 +1,6 @@
+[
+  {
+    "group": "os_pkg",
+    "start_with": "py"
+  }
+]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/rstudio/lib_groups.json b/integration-tests/examples/test_libs/rstudio/lib_groups.json
new file mode 100644
index 0000000..87e47a5
--- /dev/null
+++ b/integration-tests/examples/test_libs/rstudio/lib_groups.json
@@ -0,0 +1,6 @@
+[
+  "pip3",
+  "others",
+  "os_pkg",
+  "r_pkg"
+]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/rstudio/lib_list.json b/integration-tests/examples/test_libs/rstudio/lib_list.json
new file mode 100644
index 0000000..5590429
--- /dev/null
+++ b/integration-tests/examples/test_libs/rstudio/lib_list.json
@@ -0,0 +1,18 @@
+[
+  {
+    "group": "os_pkg",
+    "start_with": "py"
+  },
+  {
+    "group": "others",
+    "start_with": "gh"
+  },
+  {
+    "group": "r_pkg",
+    "start_with": "sp"
+  },
+  {
+    "group": "pip3",
+    "start_with": "sp"
+  }
+]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/tensor/lib_groups.json b/integration-tests/examples/test_libs/tensor/lib_groups.json
new file mode 100644
index 0000000..b77b8c2
--- /dev/null
+++ b/integration-tests/examples/test_libs/tensor/lib_groups.json
@@ -0,0 +1,5 @@
+[
+  "pip3",
+  "others",
+  "os_pkg"
+]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/tensor/lib_list.json b/integration-tests/examples/test_libs/tensor/lib_list.json
new file mode 100644
index 0000000..d360bd8
--- /dev/null
+++ b/integration-tests/examples/test_libs/tensor/lib_list.json
@@ -0,0 +1,14 @@
+[
+  {
+    "group": "os_pkg",
+    "start_with": "py"
+  },
+  {
+    "group": "others",
+    "start_with": "gh"
+  },
+  {
+    "group": "pip3",
+    "start_with": "sp"
+  }
+]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/zeppelin/lib_groups.json b/integration-tests/examples/test_libs/zeppelin/lib_groups.json
new file mode 100644
index 0000000..87e47a5
--- /dev/null
+++ b/integration-tests/examples/test_libs/zeppelin/lib_groups.json
@@ -0,0 +1,6 @@
+[
+  "pip3",
+  "others",
+  "os_pkg",
+  "r_pkg"
+]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/zeppelin/lib_list.json b/integration-tests/examples/test_libs/zeppelin/lib_list.json
new file mode 100644
index 0000000..b443274
--- /dev/null
+++ b/integration-tests/examples/test_libs/zeppelin/lib_list.json
@@ -0,0 +1,19 @@
+[
+  {
+    "group": "os_pkg",
+    "start_with": "py"
+  },
+  {
+    "group": "others",
+    "start_with": "gh"
+  },
+  {
+    "group": "r_pkg",
+    "start_with": "sp"
+  },
+  {
+    "group": "pip3",
+    "start_with": "sp"
+  }
+]
\ No newline at end of file
diff --git a/integration-tests/examples/test_templates/README.txt b/integration-tests/examples/test_templates/README.txt
new file mode 100644
index 0000000..1ee4d9f
--- /dev/null
+++ b/integration-tests/examples/test_templates/README.txt
@@ -0,0 +1,2 @@
+Copy these directories to the S3 bucket from which the templates will be copied to the notebook instances.
+The source bucket name is specified in the config.properties file.
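+For example, assuming the source bucket configured in config.properties is named
+"datalab-test-templates" (a placeholder name), the upload could look like:
+  aws s3 sync ./jupyter s3://datalab-test-templates/jupyter/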
\ No newline at end of file
diff --git a/integration-tests/examples/test_templates/deeplearning/cat_gray.jpg b/integration-tests/examples/test_templates/deeplearning/cat_gray.jpg
new file mode 100644
index 0000000..43c5ce3
Binary files /dev/null and b/integration-tests/examples/test_templates/deeplearning/cat_gray.jpg differ
diff --git a/integration-tests/examples/test_templates/deeplearning/conv.prototxt b/integration-tests/examples/test_templates/deeplearning/conv.prototxt
new file mode 100644
index 0000000..0343891
--- /dev/null
+++ b/integration-tests/examples/test_templates/deeplearning/conv.prototxt
@@ -0,0 +1,48 @@
+# *****************************************************************************
+#
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing,
+#  software distributed under the License is distributed on an
+#  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+#  KIND, either express or implied.  See the License for the
+#  specific language governing permissions and limitations
+#  under the License.
+#
+# ******************************************************************************
+
+# Simple single-layer network to showcase editing model parameters.
+name: "convolution"
+layer {
+  name: "data"
+  type: "Input"
+  top: "data"
+  input_param { shape: { dim: 1 dim: 1 dim: 100 dim: 100 } }
+}
+layer {
+  name: "conv"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv"
+  convolution_param {
+    num_output: 3
+    kernel_size: 5
+    stride: 1
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+  }
+}
diff --git a/integration-tests/examples/test_templates/deeplearning/template_caffe.ipynb b/integration-tests/examples/test_templates/deeplearning/template_caffe.ipynb
new file mode 100644
index 0000000..11457fc
--- /dev/null
+++ b/integration-tests/examples/test_templates/deeplearning/template_caffe.ipynb
@@ -0,0 +1,99 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "%matplotlib inline\n",
+    "\n",
+    "# Make sure that caffe is on the python path:\n",
+    "caffe_root = './'  # this file is expected to be in {caffe_root}/examples\n",
+    "import sys\n",
+    "sys.path.insert(0, caffe_root + 'python')\n",
+    "\n",
+    "import caffe\n",
+    "\n",
+    "# configure plotting\n",
+    "plt.rcParams['figure.figsize'] = (10, 10)\n",
+    "plt.rcParams['image.interpolation'] = 'nearest'\n",
+    "plt.rcParams['image.cmap'] = 'gray'"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Load the net, list its data and params, and filter an example image.\n",
+    "caffe.set_mode_gpu()\n",
+    "net = caffe.Net('test_templates/conv.prototxt', caffe.TEST)\n",
+    "print(\"blobs {}\\nparams {}\".format(net.blobs.keys(), net.params.keys()))\n",
+    "\n",
+    "# load image and prepare as a single input batch for Caffe\n",
+    "im = np.array(caffe.io.load_image('test_templates/cat_gray.jpg', color=False)).squeeze()\n",
+    "plt.title(\"original image\")\n",
+    "plt.imshow(im)\n",
+    "plt.axis('off')\n",
+    "\n",
+    "im_input = im[np.newaxis, np.newaxis, :, :]\n",
+    "net.blobs['data'].reshape(*im_input.shape)\n",
+    "net.blobs['data'].data[...] = im_input"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# helper show filter outputs\n",
+    "def show_filters(net):\n",
+    "    net.forward()\n",
+    "    plt.figure()\n",
+    "    filt_min, filt_max = net.blobs['conv'].data.min(), net.blobs['conv'].data.max()\n",
+    "    for i in range(3):\n",
+    "        plt.subplot(1,4,i+2)\n",
+    "        plt.title(\"filter #{} output\".format(i))\n",
+    "        plt.imshow(net.blobs['conv'].data[0, i], vmin=filt_min, vmax=filt_max)\n",
+    "        plt.tight_layout()\n",
+    "        plt.axis('off')\n",
+    "\n",
+    "# filter the image with initial \n",
+    "show_filters(net)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "KERNEL_NAME"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.13"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 1
+}
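
The conv.prototxt header above promises "editing model parameters", which
template_caffe.ipynb stops short of demonstrating. A minimal sketch of that step,
reusing the `net` and `show_filters` names defined in the notebook (the specific
values are illustrative, not part of the template):

    # Assumes the notebook cells above have run: `net` is loaded and
    # `show_filters` is defined. net.params['conv'] holds [weights, biases].
    net.params['conv'][0].data[...] *= 10   # scale the Gaussian-filled weights
    net.params['conv'][1].data[...] = 0.5   # overwrite all biases
    show_filters(net)                       # re-run the forward pass to compare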
diff --git a/integration-tests/examples/test_templates/deeplearning/template_caffe2.ipynb b/integration-tests/examples/test_templates/deeplearning/template_caffe2.ipynb
new file mode 100644
index 0000000..f771e33
--- /dev/null
+++ b/integration-tests/examples/test_templates/deeplearning/template_caffe2.ipynb
@@ -0,0 +1,619 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Caffe2 Basic Concepts - Operators & Nets\n",
+    "\n",
+    "In this tutorial we will go through a set of Caffe2 basics: the basic concepts including how operators and nets are being written.\n",
+    "\n",
+    "First, let's import caffe2. `core` and `workspace` are usually the two that you need most. If you want to manipulate protocol buffers generated by caffe2, you probably also want to import `caffe2_pb2` from `caffe2.proto`."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# We'll also import a few standard python libraries\n",
+    "from matplotlib import pyplot\n",
+    "import numpy as np\n",
+    "import time\n",
+    "\n",
+    "# These are the droids you are looking for.\n",
+    "from caffe2.python import core, workspace\n",
+    "from caffe2.proto import caffe2_pb2\n",
+    "\n",
+    "# Let's show all plots inline.\n",
+    "%matplotlib inline"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "You might see a warning saying that caffe2 does not have GPU support. That means you are running a CPU-only build. Don't be alarmed - anything CPU is still runnable without problem."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "collapsed": true
+   },
+   "source": [
+    "## Workspaces\n",
+    "\n",
+    "Let's cover workspaces first, where all the data reside.\n",
+    "\n",
+    "If you are familiar with Matlab, workspace consists of blobs you create and store in memory. For now, consider a blob to be a N-dimensional Tensor similar to numpy's ndarray, but is contiguous. Down the road, we will show you that a blob is actually a typed pointer that can store any type of C++ objects, but Tensor is the most common type stored in a blob. Let's show what the interface looks like.\n",
+    "\n",
+    "`Blobs()` prints out all existing blobs in the workspace. \n",
+    "`HasBlob()` queries if a blob exists in the workspace. For now, we don't have anything yet."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(\"Current blobs in the workspace: {}\".format(workspace.Blobs()))\n",
+    "print(\"Workspace has blob 'X'? {}\".format(workspace.HasBlob(\"X\")))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "We can feed blobs into the workspace using `FeedBlob()`."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "X = np.random.randn(2, 3).astype(np.float32)\n",
+    "print(\"Generated X from numpy:\\n{}\".format(X))\n",
+    "workspace.FeedBlob(\"X\", X)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Now, let's take a look what blobs there are in the workspace."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "print(\"Current blobs in the workspace: {}\".format(workspace.Blobs()))\n",
+    "print(\"Workspace has blob 'X'? {}\".format(workspace.HasBlob(\"X\")))\n",
+    "print(\"Fetched X:\\n{}\".format(workspace.FetchBlob(\"X\")))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Let's verify that the arrays are equal."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "np.testing.assert_array_equal(X, workspace.FetchBlob(\"X\"))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Also, if you are trying to access a blob that does not exist, an error will be thrown:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "try:\n",
+    "    workspace.FetchBlob(\"invincible_pink_unicorn\")\n",
+    "except RuntimeError as err:\n",
+    "    print(err)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "One thing that you might not use immediately: you can have multiple workspaces in Python using different names, and switch between them. Blobs in different workspaces are separate from each other. You can query the current workspace using `CurrentWorkspace`. Let's try switching the workspace by name (gutentag) and creating a new one if it doesn't exist."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(\"Current workspace: {}\".format(workspace.CurrentWorkspace()))\n",
+    "print(\"Current blobs in the workspace: {}\".format(workspace.Blobs()))\n",
+    "\n",
+    "# Switch the workspace. The second argument \"True\" means creating \n",
+    "# the workspace if it is missing.\n",
+    "workspace.SwitchWorkspace(\"gutentag\", True)\n",
+    "\n",
+    "# Let's print the current workspace. Note that there is nothing in the\n",
+    "# workspace yet.\n",
+    "print(\"Current workspace: {}\".format(workspace.CurrentWorkspace()))\n",
+    "print(\"Current blobs in the workspace: {}\".format(workspace.Blobs()))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Let's switch back to the default workspace."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "workspace.SwitchWorkspace(\"default\")\n",
+    "print(\"Current workspace: {}\".format(workspace.CurrentWorkspace()))\n",
+    "print(\"Current blobs in the workspace: {}\".format(workspace.Blobs()))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Finally, `ResetWorkspace()` clears anything that is in the current workspace."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "workspace.ResetWorkspace()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Operators\n",
+    "\n",
+    "Operators in Caffe2 are kind of like functions. From the C++ side, they all derive from a common interface, and are registered by type, so that we can call different operators during runtime. The interface of operators is defined in `caffe2/proto/caffe2.proto`. Basically, it takes in a bunch of inputs, and produces a bunch of outputs.\n",
+    "\n",
+    "Remember, when we say \"create an operator\" in Caffe2 Python, nothing gets run yet. All it does is to create the protocol buffere that specifies what the operator should be. At a later time it will be sent to the C++ backend for execution. If you are not familiar with protobuf, it is a json-like serialization tool for structured data. Find more about protocol buffers [here](https://developers.google.com/protocol-buffers/).\n",
+    "\n",
+    "Let's see an actual example."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Create an operator.\n",
+    "op = core.CreateOperator(\n",
+    "    \"Relu\", # The type of operator that we want to run\n",
+    "    [\"X\"], # A list of input blobs by their names\n",
+    "    [\"Y\"], # A list of output blobs by their names\n",
+    ")\n",
+    "# and we are done!"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As we mentioned, the created op is actually a protobuf object. Let's show the content."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "print(\"Type of the created op is: {}\".format(type(op)))\n",
+    "print(\"Content:\\n\")\n",
+    "print(str(op))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "OK, let's run the operator. We first feed in the input X to the workspace. \n",
+    "Then the simplest way to run an operator is to do `workspace.RunOperatorOnce(operator)`"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "workspace.FeedBlob(\"X\", np.random.randn(2, 3).astype(np.float32))\n",
+    "workspace.RunOperatorOnce(op)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "After execution, let's see if the operator is doing the right thing, which is our neural network's activation function ([Relu](https://en.wikipedia.org/wiki/Rectifier_(neural_networks))) in this case."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(\"Current blobs in the workspace: {}\\n\".format(workspace.Blobs()))\n",
+    "print(\"X:\\n{}\\n\".format(workspace.FetchBlob(\"X\")))\n",
+    "print(\"Y:\\n{}\\n\".format(workspace.FetchBlob(\"Y\")))\n",
+    "print(\"Expected:\\n{}\\n\".format(np.maximum(workspace.FetchBlob(\"X\"), 0)))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "This is working if your Expected output matches your Y output in this example.\n",
+    "\n",
+    "Operators also take optional arguments if needed. They are specified as key-value pairs. Let's take a look at one simple example, which takes a tensor and fills it with Gaussian random variables."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "op = core.CreateOperator(\n",
+    "    \"GaussianFill\",\n",
+    "    [], # GaussianFill does not need any parameters.\n",
+    "    [\"Z\"],\n",
+    "    shape=[100, 100], # shape argument as a list of ints.\n",
+    "    mean=1.0,  # mean as a single float\n",
+    "    std=1.0, # std as a single float\n",
+    ")\n",
+    "print(\"Content of op:\\n\")\n",
+    "print(str(op))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Let's run it and see if things are as intended."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "workspace.RunOperatorOnce(op)\n",
+    "temp = workspace.FetchBlob(\"Z\")\n",
+    "pyplot.hist(temp.flatten(), bins=50)\n",
+    "pyplot.title(\"Distribution of Z\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "If you see a bell shaped curve then it worked!"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Nets\n",
+    "\n",
+    "Nets are essentially computation graphs. We keep the name `Net` for backward consistency (and also to pay tribute to neural nets). A Net is composed of multiple operators just like a program written as a sequence of commands. Let's take a look.\n",
+    "\n",
+    "When we talk about nets, we will also talk about BlobReference, which is an object that wraps around a string so we can do easy chaining of operators.\n",
+    "\n",
+    "Let's create a network that is essentially the equivalent of the following python math:\n",
+    "```\n",
+    "X = np.random.randn(2, 3)\n",
+    "W = np.random.randn(5, 3)\n",
+    "b = np.ones(5)\n",
+    "Y = X * W^T + b\n",
+    "```\n",
+    "We'll show the progress step by step. Caffe2's `core.Net` is a wrapper class around a NetDef protocol buffer."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "When creating a network, its underlying protocol buffer is essentially empty other than the network name. Let's create the net and then show the proto content."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
+   "source": [
+    "net = core.Net(\"my_first_net\")\n",
+    "print(\"Current network proto:\\n\\n{}\".format(net.Proto()))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Let's create a blob called X, and use GaussianFill to fill it with some random data."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "X = net.GaussianFill([], [\"X\"], mean=0.0, std=1.0, shape=[2, 3], run_once=0)\n",
+    "print(\"New network proto:\\n\\n{}\".format(net.Proto()))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "collapsed": true
+   },
+   "source": [
+    "You might have observed a few differences from the earlier `core.CreateOperator` call. Basically, when we have a net, you can direct create an operator *and* add it to the net at the same time using Python tricks: essentially, if you call `net.SomeOp` where SomeOp is a registered type string of an operator, this essentially gets translated to\n",
+    "```\n",
+    "op = core.CreateOperator(\"SomeOp\", ...)\n",
+    "net.Proto().op.append(op)\n",
+    "```\n",
+    "\n",
+    "Also, you might be wondering what X is. X is a `BlobReference` which basically records two things:\n",
+    "- what its name is. You can access the name by str(X)\n",
+    "- which net it gets created from. It is recorded by an internal variable `_from_net`, but most likely\n",
+    "you won't need that.\n",
+    "\n",
+    "Let's verify it. Also, remember, we are not actually running anything yet, so X contains nothing but a symbol. Don't expect to get any numerical values out of it right now :)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "print(\"Type of X is: {}\".format(type(X)))\n",
+    "print(\"The blob name is: {}\".format(str(X)))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Let's continue to create W and b."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "W = net.GaussianFill([], [\"W\"], mean=0.0, std=1.0, shape=[5, 3], run_once=0)\n",
+    "b = net.ConstantFill([], [\"b\"], shape=[5,], value=1.0, run_once=0)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Now, one simple code sugar: since the BlobReference objects know what net it is generated from, in addition to creating operators from net, you can also create operators from BlobReferences. Let's create the FC operator in this way."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "Y = X.FC([W, b], [\"Y\"])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Under the hood, `X.FC(...)` simply delegates to `net.FC` by inserting `X` as the first input of the corresponding operator, so what we did above is equivalent to\n",
+    "```\n",
+    "Y = net.FC([X, W, b], [\"Y\"])\n",
+    "```\n",
+    "\n",
+    "Let's take a look at the current network."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(\"Current network proto:\\n\\n{}\".format(net.Proto()))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Too verbose huh? Let's try to visualize it as a graph. Caffe2 ships with a very minimal graph visualization tool for this purpose. Let's show that in ipython."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from caffe2.python import net_drawer\n",
+    "from IPython import display\n",
+    "graph = net_drawer.GetPydotGraph(net, rankdir=\"LR\")\n",
+    "display.Image(graph.create_png(), width=800)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "So we have defined a `Net`, but nothing gets executed yet. Remember that the net above is essentially a protobuf that holds the definition of the network. When we actually want to run the network, what happens under the hood is:\n",
+    "- Instantiate a C++ net object from the protobuf;\n",
+    "- Call the instantiated net's Run() function.\n",
+    "\n",
+    "Before we do anything, we should clear any earlier workspace variables with `ResetWorkspace()`.\n",
+    "\n",
+    "Then there are two ways to run a net from Python. We will do the first option in the example below.\n",
+    "\n",
+    "1. Using `workspace.RunNetOnce()`, which instantiates, runs and immediately destructs the network. \n",
+    "2. A little bit more complex and involves two steps: \n",
+    "    (a) call `workspace.CreateNet()` to create the C++ net object owned by the workspace, and\n",
+    "    (b) use `workspace.RunNet()` by passing the name of the network to it.\n",
+    "    \n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
+   "source": [
+    "workspace.ResetWorkspace()\n",
+    "print(\"Current blobs in the workspace: {}\".format(workspace.Blobs()))\n",
+    "workspace.RunNetOnce(net)\n",
+    "print(\"Blobs in the workspace after execution: {}\".format(workspace.Blobs()))\n",
+    "# Let's dump the contents of the blobs\n",
+    "for name in workspace.Blobs():\n",
+    "    print(\"{}:\\n{}\".format(name, workspace.FetchBlob(name)))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Now let's try the second way to create the net, and run it. First clear the variables with `ResetWorkspace()`, create the net with the workspace's net object you created earlier `CreateNet(net_object)`, and then run the net by name with `RunNet(net_name)`."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "workspace.ResetWorkspace()\n",
+    "print(\"Current blobs in the workspace: {}\".format(workspace.Blobs()))\n",
+    "workspace.CreateNet(net)\n",
+    "workspace.RunNet(net.Proto().name)\n",
+    "print(\"Blobs in the workspace after execution: {}\".format(workspace.Blobs()))\n",
+    "for name in workspace.Blobs():\n",
+    "    print(\"{}:\\n{}\".format(name, workspace.FetchBlob(name)))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "There are a few differences between `RunNetOnce` and `RunNet`, but probably the main difference is the computation time overhead. Since `RunNetOnce` involves serializing the protobuf to pass between Python and C and instantiating the network, it may take longer to run. Let's see in this case what the overhead is."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# It seems that %timeit magic does not work well with\n",
+    "# C++ extensions so we'll basically do for loops\n",
+    "start = time.time()\n",
+    "for i in range(1000):\n",
+    "    workspace.RunNetOnce(net)\n",
+    "end = time.time()\n",
+    "print('Run time per RunNetOnce: {}'.format((end - start) / 1000))\n",
+    "\n",
+    "start = time.time()\n",
+    "for i in range(1000):\n",
+    "    workspace.RunNet(net.Proto().name)\n",
+    "end = time.time()\n",
+    "print('Run time per RunNet: {}'.format((end - start) / 1000))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "collapsed": true
+   },
+   "source": [
+    "OK, so above are a few key components if you would like to use Caffe2 from the python side. We are going to add more to the tutorial as we find more needs. For now, kindly check out the rest of the tutorials!"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "KERNEL_NAME"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.13"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 1
+}
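
The net in template_caffe2.ipynb is defined as the numpy math `Y = X * W^T + b`, so
a direct way to close the loop is to check the executed net against that formula. A
sketch, assuming the workspace state left by the `RunNet` cells above:

    import numpy as np
    from caffe2.python import workspace

    X = workspace.FetchBlob("X")
    W = workspace.FetchBlob("W")
    b = workspace.FetchBlob("b")
    Y = workspace.FetchBlob("Y")
    # FC computes X.dot(W^T) + b, so the two results should agree
    # up to float32 rounding.
    np.testing.assert_allclose(Y, X.dot(W.T) + b, rtol=1e-4, atol=1e-5)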
diff --git a/integration-tests/examples/test_templates/deeplearning/template_cntk.ipynb b/integration-tests/examples/test_templates/deeplearning/template_cntk.ipynb
new file mode 100644
index 0000000..44b7d1b
--- /dev/null
+++ b/integration-tests/examples/test_templates/deeplearning/template_cntk.ipynb
@@ -0,0 +1,78 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from IPython.display import Image\n",
+    "# Figure 1\n",
+    "Image(url=\"https://www.cntk.ai/jup/cancer_data_plot.jpg\", width=400, height=400)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Figure 2\n",
+    "Image(url= \"https://www.cntk.ai/jup/cancer_classify_plot.jpg\", width=400, height=400)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Figure 3\n",
+    "Image(url= \"https://www.cntk.ai/jup/logistic_neuron.jpg\", width=300, height=200)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Import the relevant components\n",
+    "from __future__ import print_function\n",
+    "import numpy as np\n",
+    "import sys\n",
+    "import os\n",
+    "from cntk import *\n",
+    "\n",
+    "# Select the right target device when this notebook is being tested:\n",
+    "if 'TEST_DEVICE' in os.environ:\n",
+    "    import cntk\n",
+    "    if os.environ['TEST_DEVICE'] == 'cpu':\n",
+    "        cntk.device.try_set_default_device(cntk.device.cpu())\n",
+    "    else:\n",
+    "        cntk.device.try_set_default_device(cntk.device.gpu(0))"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "KERNEL_NAME"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.13"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/integration-tests/examples/test_templates/deeplearning/template_keras.ipynb b/integration-tests/examples/test_templates/deeplearning/template_keras.ipynb
new file mode 100644
index 0000000..683e1af
--- /dev/null
+++ b/integration-tests/examples/test_templates/deeplearning/template_keras.ipynb
@@ -0,0 +1,64 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from keras.layers import Input, Embedding, LSTM, Dense\n",
+    "from keras.models import Model\n",
+    "\n",
+    "# Headline input: meant to receive sequences of 100 integers, between 1 and 10000.\n",
+    "# Note that we can name any layer by passing it a \"name\" argument.\n",
+    "main_input = Input(shape=(100,), dtype='int32', name='main_input')\n",
+    "\n",
+    "# This embedding layer will encode the input sequence\n",
+    "# into a sequence of dense 512-dimensional vectors.\n",
+    "x = Embedding(output_dim=512, input_dim=10000, input_length=100)(main_input)\n",
+    "\n",
+    "# A LSTM will transform the vector sequence into a single vector,\n",
+    "# containing information about the entire sequence\n",
+    "lstm_out = LSTM(32)(x)\n",
+    "auxiliary_output = Dense(1, activation='sigmoid', name='aux_output')(lstm_out)\n",
+    "auxiliary_input = Input(shape=(5,), name='aux_input')\n",
+    "\n",
+    "# We stack a deep densely-connected network on top\n",
+    "x = Dense(64, activation='relu')(x)\n",
+    "x = Dense(64, activation='relu')(x)\n",
+    "x = Dense(64, activation='relu')(x)\n",
+    "\n",
+    "# And finally we add the main logistic regression layer\n",
+    "main_output = Dense(1, activation='sigmoid', name='main_output')(x)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "KERNEL_NAME"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.13"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
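
template_keras.ipynb imports `Model` but never builds it, so the cell ends with
dangling inputs and outputs. Following the Keras functional-API guide this template
is based on, a sketch of the missing wiring (the concatenate step and the loss
weights come from that guide, not from this repository; Keras 1 spelled these
`merge` and `input=`/`output=`):

    from keras.layers import concatenate
    from keras.models import Model

    # Assumes the notebook cell above has run: main_input, lstm_out,
    # auxiliary_input, auxiliary_output, and Dense are all in scope.
    # Merge the LSTM summary with the auxiliary input before the dense stack.
    x = concatenate([lstm_out, auxiliary_input])
    x = Dense(64, activation='relu')(x)
    main_output = Dense(1, activation='sigmoid', name='main_output')(x)

    model = Model(inputs=[main_input, auxiliary_input],
                  outputs=[main_output, auxiliary_output])
    model.compile(optimizer='rmsprop', loss='binary_crossentropy',
                  loss_weights=[1.0, 0.2])
    model.summary()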
diff --git a/integration-tests/examples/test_templates/deeplearning/template_mxnet.ipynb b/integration-tests/examples/test_templates/deeplearning/template_mxnet.ipynb
new file mode 100644
index 0000000..5208ce3
--- /dev/null
+++ b/integration-tests/examples/test_templates/deeplearning/template_mxnet.ipynb
@@ -0,0 +1,108 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import mxnet as mx\n",
+    "a = mx.sym.Variable('a')\n",
+    "b = mx.sym.Variable('b')\n",
+    "c = a + b\n",
+    "(a, b, c)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# elemental wise times\n",
+    "d = a * b  \n",
+    "# matrix multiplication\n",
+    "e = mx.sym.dot(a, b)   \n",
+    "# reshape\n",
+    "f = mx.sym.Reshape(d+e, shape=(1,4))  \n",
+    "# broadcast\n",
+    "g = mx.sym.broadcast_to(f, shape=(2,4))  \n",
+    "mx.viz.plot_network(symbol=g)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "%matplotlib inline\n",
+    "from __future__ import print_function\n",
+    "import os\n",
+    "import time\n",
+    "# set the number of threads you want to use before importing mxnet\n",
+    "os.environ['MXNET_CPU_WORKER_NTHREADS'] = '4'\n",
+    "import mxnet as mx\n",
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# download example images\n",
+    "proxy = os.popen('cat /etc/profile | grep https_proxy | cut -f2 -d\"=\"').read()[:-1]\n",
+    "os.popen('wget -e use_proxy=yes -e http_proxy={} http://data.mxnet.io/data/test_images.tar.gz'.format(proxy)).read()\n",
+    "os.popen('tar -xf test_images.tar.gz').read()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# opencv\n",
+    "import cv2\n",
+    "N = 1000\n",
+    "tic = time.time()\n",
+    "for i in range(N):\n",
+    "    img = cv2.imread('test_images/ILSVRC2012_val_00000001.JPEG', flags=1)\n",
+    "    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n",
+    "print(N/(time.time()-tic), 'images decoded per second with opencv')\n",
+    "plt.imshow(img); plt.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "KERNEL_NAME"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.13"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
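
The first mxnet cells above only declare symbols; nothing is computed until the
graph is bound to concrete arrays. A minimal sketch of evaluating the `c = a + b`
symbol from the template (redefined here so it runs standalone):

    import mxnet as mx

    a = mx.sym.Variable('a')
    b = mx.sym.Variable('b')
    c = a + b

    # Bind NDArray values to the symbolic inputs and run a forward pass.
    executor = c.bind(ctx=mx.cpu(),
                      args={'a': mx.nd.ones((2, 3)),
                            'b': mx.nd.ones((2, 3)) * 2})
    print(executor.forward()[0].asnumpy())  # a 2x3 array of 3.0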
diff --git a/integration-tests/examples/test_templates/deeplearning/template_preparation_tensor.ipynb b/integration-tests/examples/test_templates/deeplearning/template_preparation_tensor.ipynb
new file mode 100644
index 0000000..b1eadda
--- /dev/null
+++ b/integration-tests/examples/test_templates/deeplearning/template_preparation_tensor.ipynb
@@ -0,0 +1,285 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os, cv2, random\n",
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "%matplotlib inline \n",
+    "from keras.models import Sequential, load_model\n",
+    "from keras.layers import Dropout, Flatten, Convolution2D, MaxPooling2D, Dense, Activation\n",
+    "from keras.optimizers import Adam\n",
+    "from keras.callbacks import Callback, EarlyStopping\n",
+    "from keras.callbacks import BaseLogger, TensorBoard"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Constants definition"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "TRAIN_DIR = '/home/datalab-user/train/'\n",
+    "TEST_DIR = '/home/datalab-user/test/'\n",
+    "ROWS = 128\n",
+    "COLS = 128\n",
+    "CHANNELS = 3\n",
+    "TRAIN_IMAGES_COUNT = 1000\n",
+    "PATH_TO_LOGS = '/home/datalab-user/logs'"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Reading and adjusting images for training"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "all_images = [TRAIN_DIR+i for i in os.listdir(TRAIN_DIR)[:TRAIN_IMAGES_COUNT]]\n",
+    "test_images =  [TEST_DIR+i for i in os.listdir(TEST_DIR)]\n",
+    "random.shuffle(all_images)\n",
+    "test_coeff = int(len (all_images) * .9)\n",
+    "\n",
+    "train_images, test_images = all_images[:test_coeff], all_images[test_coeff:]\n",
+    "\n",
+    "def read_image(file_path):\n",
+    "    img = cv2.imread(file_path, cv2.IMREAD_COLOR)\n",
+    "    return cv2.resize(img, (ROWS, COLS), interpolation=cv2.INTER_CUBIC).reshape(ROWS, COLS, CHANNELS)\n",
+    "\n",
+    "def prepare_data(images):\n",
+    "    count = len(images)\n",
+    "    data = np.ndarray((count, ROWS, COLS, CHANNELS), dtype=np.uint8)\n",
+    "\n",
+    "    for i, image_file in enumerate(images):\n",
+    "        image = read_image(image_file)\n",
+    "        data[i] = image#.T\n",
+    "    return data\n",
+    "\n",
+    "train = prepare_data(train_images)\n",
+    "test = prepare_data(test_images)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Image counts"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(\"Train shape: {}\".format(train.shape))\n",
+    "print(\"Test shape: {}\".format(test.shape))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Assigning labels to training images"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "labels = []\n",
+    "for i in train_images:\n",
+    "    if 'dog' in i.split(\"/\")[-1] :\n",
+    "        labels.append(1)\n",
+    "    else:\n",
+    "        labels.append(0)\n",
+    "        \n",
+    "labels_test = []\n",
+    "for i in test_images:\n",
+    "    if 'dog' in i.split(\"/\")[-1] :\n",
+    "        labels_test.append(1)\n",
+    "    else:\n",
+    "        labels_test.append(0)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Building a convnet"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "optimizer = Adam(lr=1e-6)\n",
+    "objective = 'binary_crossentropy'\n",
+    "\n",
+    "def build_model():\n",
+    "    \n",
+    "    model = Sequential()\n",
+    "\n",
+    "    model.add(Convolution2D(32, 3, 3, border_mode='same', input_shape=(ROWS, COLS, 3), activation='relu'))\n",
+    "    model.add(Convolution2D(32, 3, 3, border_mode='same', activation='relu'))\n",
+    "    model.add(MaxPooling2D(pool_size=(2, 2)))\n",
+    "\n",
+    "    model.add(Convolution2D(64, 3, 3, border_mode='same', activation='relu'))\n",
+    "    model.add(Convolution2D(64, 3, 3, border_mode='same', activation='relu'))\n",
+    "    model.add(MaxPooling2D(pool_size=(2, 2)))\n",
+    "    \n",
+    "    model.add(Convolution2D(128, 3, 3, border_mode='same', activation='relu'))\n",
+    "    model.add(Convolution2D(128, 3, 3, border_mode='same', activation='relu'))\n",
+    "    model.add(MaxPooling2D(pool_size=(2, 2)))\n",
+    "\n",
+    "    model.add(Flatten())\n",
+    "    model.add(Dense(256, activation='relu'))\n",
+    "    model.add(Dropout(0.5))\n",
+    "    \n",
+    "    model.add(Dense(1))\n",
+    "    model.add(Activation('sigmoid'))\n",
+    "    \n",
+    "    model.compile(loss=objective, optimizer=optimizer, metrics=['accuracy'])\n",
+    "    return model\n",
+    "\n",
+    "\n",
+    "model = build_model()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Training the model"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "This block takes about 2.5-3 hours to execute if training on whole dataset of 22500 images"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "nb_epoch = 10\n",
+    "batch_size = 16\n",
+    "\n",
+    "class LossHistory(Callback):\n",
+    "    def on_train_begin(self, logs={}):\n",
+    "        self.losses = []\n",
+    "        self.val_losses = []\n",
+    "        \n",
+    "    def on_epoch_end(self, batch, logs={}):\n",
+    "        self.losses.append(logs.get('loss'))\n",
+    "        self.val_losses.append(logs.get('val_loss'))\n",
+    "\n",
+    "early_stopping = EarlyStopping(monitor='val_loss', patience=5, verbose=1, mode='auto')        \n",
+    "        \n",
+    "def train_and_test_model():\n",
+    "    history = LossHistory()\n",
+    "    tensorboard = TensorBoard(log_dir=PATH_TO_LOGS)\n",
+    "    model.fit(train, labels, batch_size=batch_size, nb_epoch=nb_epoch,\n",
+    "              validation_split=0.25, verbose=2, shuffle=True, callbacks=[history, early_stopping, tensorboard])\n",
+    "    \n",
+    "\n",
+    "    predictions = model.predict(test, verbose=2)\n",
+    "    return predictions, history\n",
+    "\n",
+    "predictions, history = train_and_test_model()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Saving the model and weights"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "path_to_model = '/home/datalab-user/model_1000.json'\n",
+    "path_to_weights = '/home/datalab-user/weigths_1000.h5'\n",
+    "\n",
+    "model.save(path_to_model)\n",
+    "model.save_weights(path_to_weights)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Plotting learning curves"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "loss = history.losses\n",
+    "val_loss = history.val_losses\n",
+    "\n",
+    "plt.xlabel('Epochs')\n",
+    "plt.ylabel('Loss')\n",
+    "plt.title('VGG-16 Loss Trend')\n",
+    "plt.plot(loss, 'blue', label='Training Loss')\n",
+    "plt.plot(val_loss, 'green', label='Validation Loss')\n",
+    "plt.xticks(range(0,len(loss))[0::2])\n",
+    "plt.legend()\n",
+    "plt.show()"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "KERNEL_NAME"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.13"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
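
Since template_preparation_tensor.ipynb collects held-out labels into `labels_test`,
a quick sanity metric on the predictions costs only a few lines. A sketch reusing
the `predictions` and `labels_test` variables from the training cells:

    import numpy as np

    # Threshold the sigmoid outputs at 0.5 and compare with held-out labels.
    predicted = (predictions[:, 0] >= 0.5).astype(int)
    accuracy = np.mean(predicted == np.array(labels_test))
    print("Held-out accuracy: {:.2%}".format(accuracy))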
diff --git a/integration-tests/examples/test_templates/deeplearning/template_theano.ipynb b/integration-tests/examples/test_templates/deeplearning/template_theano.ipynb
new file mode 100644
index 0000000..815e6fe
--- /dev/null
+++ b/integration-tests/examples/test_templates/deeplearning/template_theano.ipynb
@@ -0,0 +1,45 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import theano\n",
+    "from theano import tensor\n",
+    "# declare two symbolic floating-point scalars\n",
+    "a = tensor.dscalar()\n",
+    "b = tensor.dscalar()\n",
+    "# create a simple expression\n",
+    "c = a + b\n",
+    "# convert the expression into a callable object that takes (a,b)\n",
+    "# values as input and computes a value for c\n",
+    "f = theano.function([a,b], c)\n",
+    "# bind 1.5 to 'a', 2.5 to 'b', and evaluate 'c'\n",
+    "assert 4.0 == f(1.5, 2.5)"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "KERNEL_NAME"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.13"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
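
template_theano.ipynb stops at a forward evaluation; the same machinery also
differentiates the expression, which is worth a short sketch (it redefines the
template's `a`, `b`, `c` so it runs standalone):

    import theano
    from theano import tensor

    a = tensor.dscalar()
    b = tensor.dscalar()
    c = a + b
    # d(a + b)/da == 1 everywhere, so the compiled gradient returns 1.0.
    g = theano.grad(c, a)
    f_grad = theano.function([a, b], g, on_unused_input='ignore')
    assert f_grad(1.5, 2.5) == 1.0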
diff --git a/integration-tests/examples/test_templates/deeplearning/template_torch.ipynb b/integration-tests/examples/test_templates/deeplearning/template_torch.ipynb
new file mode 100644
index 0000000..0c476d5
--- /dev/null
+++ b/integration-tests/examples/test_templates/deeplearning/template_torch.ipynb
@@ -0,0 +1,73 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "itorch.image({image.lena(), image.lena(), image.lena()})"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "itorch.html('<p><b>Hi there!</b> this is arbitrary HTML</p>')\n",
+    "window_id = itorch.html('<p>This text will be replaced in 2 seconds</p>')\n",
+    "os.execute('sleep 2')\n",
+    "itorch.html('<p>magic!</p>', window_id)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "Plot = require 'itorch.Plot'\n",
+    "x1 = torch.randn(40):mul(100)\n",
+    "y1 = torch.randn(40):mul(100)\n",
+    "x2 = torch.randn(40):mul(100)\n",
+    "y2 = torch.randn(40):mul(100)\n",
+    "x3 = torch.randn(40):mul(200)\n",
+    "y3 = torch.randn(40):mul(200)\n",
+    "\n",
+    "\n",
+    "-- scatter plots\n",
+    "plot = Plot():circle(x1, y1, 'red', 'hi'):circle(x2, y2, 'blue', 'bye'):draw()\n",
+    "plot:circle(x3,y3,'green', 'yolo'):redraw()\n",
+    "plot:title('Scatter Plot Demo'):redraw()\n",
+    "plot:xaxis('length'):yaxis('width'):redraw()\n",
+    "plot:legend(true)\n",
+    "plot:redraw()\n",
+    "-- print(plot:toHTML())\n",
+    "plot:save('out.html')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "plot = Plot():line(x1, y1,'red','example'):legend(true):title('Line Plot Demo'):draw()"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "iTorch",
+   "language": "lua",
+   "name": "KERNEL_NAME"
+  },
+  "language_info": {
+   "name": "lua",
+   "version": "5.1"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/integration-tests/examples/test_templates/deeplearning/template_visualization_tensor.ipynb b/integration-tests/examples/test_templates/deeplearning/template_visualization_tensor.ipynb
new file mode 100644
index 0000000..c233ab2
--- /dev/null
+++ b/integration-tests/examples/test_templates/deeplearning/template_visualization_tensor.ipynb
@@ -0,0 +1,139 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os, cv2\n",
+    "from keras.models import load_model\n",
+    "import matplotlib.pyplot as plt\n",
+    "%matplotlib inline\n",
+    "import numpy as np\n",
+    "import tensorflow as tf"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Pathes to model and weights"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "path_to_model = '/home/datalab-user/model_1000.json'\n",
+    "path_to_weights = '/home/datalab-user/weigths_1000.h5'"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Loading test images"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "ROWS = 128\n",
+    "COLS = 128\n",
+    "CHANNELS = 3\n",
+    "TEST_DIR = '/home/datalab-user/test/'\n",
+    "all_image_names = os.listdir(TEST_DIR)\n",
+    "all_image_names.sort()\n",
+    "test_images =  [TEST_DIR+i for i in all_image_names[6:11] + all_image_names[19:32] + all_image_names[33:34]]\n",
+    "\n",
+    "def read_image(file_path):\n",
+    "    img = cv2.imread(file_path, cv2.IMREAD_COLOR)\n",
+    "    return cv2.resize(img, (ROWS, COLS), interpolation=cv2.INTER_CUBIC).reshape(ROWS, COLS, CHANNELS)\n",
+    "\n",
+    "def prep_data(images):\n",
+    "    count = len(images)\n",
+    "    data = np.ndarray((count, ROWS, COLS, CHANNELS), dtype=np.uint8)\n",
+    "\n",
+    "    for i, image_file in enumerate(images):\n",
+    "        image = read_image(image_file)\n",
+    "        data[i] = image\n",
+    "    return data\n",
+    "test = prep_data(test_images)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Loading the model and making predictions on test data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "with tf.device('/cpu:0'):\n",
+    "    model = load_model(path_to_model)\n",
+    "    model.load_weights(path_to_weights)\n",
+    "    predictions = model.predict(test, verbose=2)     "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Visualizing results (rendering can take about a minute)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
+   "source": [
+    "plt.figure(figsize=(16, 12))\n",
+    "for i in range(0, 12):\n",
+    "    plt.subplot(3, 4, i+1)\n",
+    "    if predictions[i, 0] >= 0.5: \n",
+    "        plt.title('{:.2%} Dog'.format(predictions[i][0]))\n",
+    "    else: \n",
+    "        plt.title('{:.2%} Cat'.format(1-predictions[i][0]))\n",
+    "        \n",
+    "    plt.imshow(cv2.cvtColor(test[i], cv2.COLOR_BGR2RGB))\n",
+    "    plt.axis('off')\n",
+    "plt.show()"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "KERNEL_NAME"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.13"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/integration-tests/examples/test_templates/jupyter/template_preparation_pyspark.ipynb b/integration-tests/examples/test_templates/jupyter/template_preparation_pyspark.ipynb
new file mode 100644
index 0000000..d0360a1
--- /dev/null
+++ b/integration-tests/examples/test_templates/jupyter/template_preparation_pyspark.ipynb
@@ -0,0 +1,198 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Flights data preparation"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "from pyspark.sql import SQLContext\n",
+    "from pyspark.sql import DataFrame\n",
+    "from pyspark.sql import Row\n",
+    "from pyspark.sql.types import *\n",
+    "import pandas as pd\n",
+    "import StringIO\n",
+    "import matplotlib.pyplot as plt\n",
+    "hc = sc._jsc.hadoopConfiguration()\n",
+    "hc.set(\"hive.execution.engine\", \"mr\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Function to parse CSV"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import csv\n",
+    "\n",
+    "def parseCsv(csvStr):\n",
+    "    f = StringIO.StringIO(csvStr)\n",
+    "    reader = csv.reader(f, delimiter=',')\n",
+    "    row = reader.next()\n",
+    "    return row\n",
+    "\n",
+    "scsv = '\"02Q\",\"Titan Airways\"'\n",
+    "row = parseCsv(scsv)\n",
+    "print row[0]\n",
+    "print row[1]\n",
+    "\n",
+    "working_storage = 'WORKING_STORAGE'\n",
+    "output_directory = 'jupyter/py2'\n",
+    "protocol_name = 'PROTOCOL_NAME://'"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Parse and convert Carrier data to parquet"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "carriersHeader = 'Code,Description'\n",
+    "carriersText = sc.textFile(protocol_name + working_storage + \"/jupyter_dataset/carriers.csv\").filter(lambda x: x != carriersHeader)\n",
+    "carriers = carriersText.map(lambda s: parseCsv(s)) \\\n",
+    "    .map(lambda s: Row(code=s[0], description=s[1])).cache().toDF()\n",
+    "carriers.write.mode(\"overwrite\").parquet(protocol_name + working_storage + \"/\" + output_directory + \"/carriers\")    \n",
+    "sqlContext.registerDataFrameAsTable(carriers, \"carriers\")\n",
+    "carriers.limit(20).toPandas()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Parse and convert to parquet Airport data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "airportsHeader= '\"iata\",\"airport\",\"city\",\"state\",\"country\",\"lat\",\"long\"'\n",
+    "airports = sc.textFile(protocol_name + working_storage + \"/jupyter_dataset/airports.csv\") \\\n",
+    "    .filter(lambda x: x != airportsHeader) \\\n",
+    "    .map(lambda s: parseCsv(s)) \\\n",
+    "    .map(lambda p: Row(iata=p[0], \\\n",
+    "                       airport=p[1], \\\n",
+    "                       city=p[2], \\\n",
+    "                       state=p[3], \\\n",
+    "                       country=p[4], \\\n",
+    "                       lat=float(p[5]), \\\n",
+    "                       longt=float(p[6])) \\\n",
+    "        ).cache().toDF()\n",
+    "airports.write.mode(\"overwrite\").parquet(protocol_name + working_storage + \"/\" + output_directory + \"/airports\")    \n",
+    "sqlContext.registerDataFrameAsTable(airports, \"airports\")\n",
+    "airports.limit(20).toPandas()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Parse and convert Flights data to parquet"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "flightsHeader = 'Year,Month,DayofMonth,DayOfWeek,DepTime,CRSDepTime,ArrTime,CRSArrTime,UniqueCarrier,FlightNum,TailNum,ActualElapsedTime,CRSElapsedTime,AirTime,ArrDelay,DepDelay,Origin,Dest,Distance,TaxiIn,TaxiOut,Cancelled,CancellationCode,Diverted,CarrierDelay,WeatherDelay,NASDelay,SecurityDelay,LateAircraftDelay'\n",
+    "flights = sc.textFile(protocol_name + working_storage + \"/jupyter_dataset/2008.csv.bz2\") \\\n",
+    "    .filter(lambda x: x!= flightsHeader) \\\n",
+    "    .map(lambda s: parseCsv(s)) \\\n",
+    "    .map(lambda p: Row(Year=int(p[0]), \\\n",
+    "                       Month=int(p[1]), \\\n",
+    "                       DayofMonth=int(p[2]), \\\n",
+    "                       DayOfWeek=int(p[3]), \\\n",
+    "                       DepTime=p[4], \\\n",
+    "                       CRSDepTime=p[5], \\\n",
+    "                       ArrTime=p[6], \\\n",
+    "                       CRSArrTime=p[7], \\\n",
+    "                       UniqueCarrier=p[8], \\\n",
+    "                       FlightNum=p[9], \\\n",
+    "                       TailNum=p[10], \\\n",
+    "                       ActualElapsedTime=p[11], \\\n",
+    "                       CRSElapsedTime=p[12], \\\n",
+    "                       AirTime=p[13], \\\n",
+    "                       ArrDelay=int(p[14].replace(\"NA\", \"0\")), \\\n",
+    "                       DepDelay=int(p[15].replace(\"NA\", \"0\")), \\\n",
+    "                       Origin=p[16], \\\n",
+    "                       Dest=p[17], \\\n",
+    "                       Distance=long(p[18]), \\\n",
+    "                       TaxiIn=p[19], \\\n",
+    "                       TaxiOut=p[20], \\\n",
+    "                       Cancelled=p[21], \\\n",
+    "                       CancellationCode=p[22], \\\n",
+    "                       Diverted=p[23], \\\n",
+    "                       CarrierDelay=int(p[24].replace(\"NA\", \"0\")), \\\n",
+    "                                              CarrierDelayStr=p[24], \\\n",
+    "                       WeatherDelay=int(p[25].replace(\"NA\", \"0\")), \\\n",
+    "                                              WeatherDelayStr=p[25], \\\n",
+    "                       NASDelay=int(p[26].replace(\"NA\", \"0\")), \\\n",
+    "                       SecurityDelay=int(p[27].replace(\"NA\", \"0\")), \\\n",
+    "                       LateAircraftDelay=int(p[28].replace(\"NA\", \"0\")))) \\\n",
+    "         .toDF()\n",
+    "\n",
+    "flights.write.mode(\"ignore\").parquet(protocol_name + working_storage + \"/\" + output_directory + \"/flights\")\n",
+    "sqlContext.registerDataFrameAsTable(flights, \"flights\")\n",
+    "flights.limit(10).toPandas()[[\"ArrDelay\",\"CarrierDelay\",\"CarrierDelayStr\",\"WeatherDelay\",\"WeatherDelayStr\",\"Distance\"]]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "KERNEL_NAME"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.13"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 1
+}
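
With `carriers`, `airports`, and `flights` registered as temp tables, the pyspark
preparation can be smoke-tested with a join before moving on. A sketch (the column
names come from the CSV headers parsed above; the query itself is illustrative):

    # Average 2008 arrival delay per carrier, worst first.
    top_delays = sqlContext.sql("""
        SELECT c.description AS carrier, AVG(f.ArrDelay) AS avg_arr_delay
        FROM flights f
        JOIN carriers c ON f.UniqueCarrier = c.code
        GROUP BY c.description
        ORDER BY avg_arr_delay DESC
        LIMIT 10
    """)
    top_delays.toPandas()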
diff --git a/integration-tests/examples/test_templates/jupyter/template_preparation_spark.ipynb b/integration-tests/examples/test_templates/jupyter/template_preparation_spark.ipynb
new file mode 100644
index 0000000..7b45b83
--- /dev/null
+++ b/integration-tests/examples/test_templates/jupyter/template_preparation_spark.ipynb
@@ -0,0 +1,103 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "import org.apache.spark.sql._\n",
+    "\n",
+    "val working_storage = \"WORKING_STORAGE\"\n",
+    "val output_directory = \"jupyter/scala\"\n",
+    "val protocol_name = \"PROTOCOL_NAME\"\n",
+    "val sqlCtx = new SQLContext(sc)\n",
+    "val hc = sc.hadoopConfiguration\n",
+    "hc.set(\"hive.execution.engine\", \"mr\")\n",
+    "\n",
+    "def bucketPath(path: String) = {\n",
+    "    s\"$protocol_name://$working_storage/jupyter_dataset/$path\"\n",
+    "}\n",
+    "def fullPath(path: String) = {\n",
+    "    s\"$protocol_name://$working_storage/$output_directory/$path\"\n",
+    "}"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "val carriers = sqlCtx.read.\n",
+    "                        format(\"com.databricks.spark.csv\").\n",
+    "                        option(\"inferSchema\", \"true\").\n",
+    "                        option(\"header\", \"true\").\n",
+    "                        load(bucketPath(\"carriers.csv\"))\n",
+    "carriers.write.mode(SaveMode.Overwrite).parquet(fullPath(\"carriers/\"))\n",
+    "carriers.createOrReplaceTempView(\"carriers\")\n",
+    "carriers.show(20)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "val airports = sqlCtx.read.\n",
+    "                        format(\"com.databricks.spark.csv\").\n",
+    "                        option(\"inferSchema\", \"true\").\n",
+    "                        option(\"header\", \"true\").\n",
+    "                        load(bucketPath(\"airports.csv\"))\n",
+    "airports.write.mode(SaveMode.Overwrite).parquet(fullPath(\"airports/\"))\n",
+    "airports.createOrReplaceTempView(\"airports\")\n",
+    "airports.show(20)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import sqlCtx.implicits._\n",
+    "\n",
+    "val flights_w_na = sqlCtx.read.\n",
+    "                        format(\"com.databricks.spark.csv\").\n",
+    "                        option(\"inferSchema\", \"true\").\n",
+    "                        option(\"header\", \"true\").\n",
+    "                        option(\"nullValue\", \"NA\").\n",
+    "                        load(bucketPath(\"2008.csv.bz2\"))\n",
+    "val flights = flights_w_na.na.fill(0)\n",
+    "flights.write.mode(SaveMode.Overwrite).parquet(fullPath(\"flights/\"))\n",
+    "flights.createOrReplaceTempView(\"flights\")\n",
+    "flights.select($\"ArrDelay\",$\"CarrierDelay\",$\"WeatherDelay\",$\"Distance\").show(20)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Local Apache Toree - Scala (Scala-2.11.8, Spark-2.1.0)",
+   "language": "scala",
+   "name": "KERNEL_NAME"
+  },
+  "language_info": {
+   "name": "scala",
+   "version": "2.11.8"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 1
+}
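
The Scala cell reaches the same cleaned DataFrame more directly by declaring "NA" as the CSV null value and filling nulls afterwards. A hedged PySpark equivalent of that pattern (session, input path, and output location are illustrative assumptions):

    from pyspark.sql import SparkSession

    spark = SparkSession.builder.master("local[*]").getOrCreate()
    flights_w_na = (spark.read.format("csv")
                    .option("inferSchema", "true")
                    .option("header", "true")
                    .option("nullValue", "NA")      # "NA" cells become nulls on read
                    .load("2008.csv.bz2"))          # hypothetical local copy of the dataset
    flights = flights_w_na.na.fill(0)               # fills numeric columns only
    flights.write.mode("overwrite").parquet("flights/")
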
diff --git a/integration-tests/examples/test_templates/jupyter/template_preparation_sparkr.ipynb b/integration-tests/examples/test_templates/jupyter/template_preparation_sparkr.ipynb
new file mode 100644
index 0000000..9e23a1e
--- /dev/null
+++ b/integration-tests/examples/test_templates/jupyter/template_preparation_sparkr.ipynb
@@ -0,0 +1,111 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "sparkR.session()\n",
+    "\n",
+    "working_storage <- 'WORKING_STORAGE'\n",
+    "output_directory <- 'jupyter/r'\n",
+    "protocol_name <- 'PROTOCOL_NAME'\n",
+    "\n",
+    "storage_path <- function(file_path) {\n",
+    "   sprintf('%s://%s/jupyter_dataset/%s', protocol_name, working_storage, file_path)\n",
+    "}\n",
+    "\n",
+    "full_path <- function(file_path) {\n",
+    "    sprintf('%s://%s/%s/%s', protocol_name, working_storage, output_directory, file_path)\n",
+    "}"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Parse and convert Carrier data to parquet"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "carriers <- read.df(storage_path(\"carriers.csv\"), \"csv\", header=\"true\", inferSchema=\"true\")\n",
+    "write.df(carriers, path=full_path(\"carriers\"), source=\"parquet\", mode=\"overwrite\")\n",
+    "createOrReplaceTempView(carriers, \"carriers\")\n",
+    "head(carriers, 20)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Parse and convert to parquet Airport data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "airports <- read.df(storage_path(\"airports.csv\"), \"csv\", header=\"true\", inferSchema=\"true\")\n",
+    "write.df(airports, path=full_path(\"airports\"), source=\"parquet\", mode=\"overwrite\")\n",
+    "createOrReplaceTempView(airports, \"airports\")\n",
+    "head(airports, 20)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Parse and convert Flights data to parquet"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "flights_w_na <- read.df(storage_path(\"2008.csv.bz2\"), \"csv\", header=\"true\", inferSchema=\"true\")\n",
+    "flights <- fillna(flights_w_na, 0, cols=colnames(flights_w_na)[c(15, 16, 25:29)])\n",
+    "write.df(flights, path=full_path(\"flights\"), source=\"parquet\", mode=\"overwrite\")\n",
+    "createOrReplaceTempView(flights, \"flights\")\n",
+    "colnames(flights)\n",
+    "head(flights_w_na, 5)[c(\"ArrDelay\",\"CarrierDelay\",\"WeatherDelay\",\"Distance\")]\n",
+    "head(flights, 5)[c(\"ArrDelay\",\"CarrierDelay\",\"WeatherDelay\",\"Distance\")]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Local SparkR (R-3.4.2, Spark-2.1.0)",
+   "language": "R",
+   "name": "KERNEL_NAME"
+  },
+  "language_info": {
+   "codemirror_mode": "r",
+   "file_extension": ".r",
+   "mimetype": "text/x-r-source",
+   "name": "R",
+   "pygments_lexer": "r",
+   "version": "3.4.2"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 1
+}
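
Unlike the pyspark and Scala variants, the SparkR cell fills NA only in selected delay columns. PySpark's na.fill supports the same column scoping through its subset argument; a tiny self-contained sketch on toy data (local session assumed):

    from pyspark.sql import SparkSession

    spark = SparkSession.builder.master("local[*]").getOrCreate()
    df = spark.createDataFrame([(None, None), (12, 3)], ["ArrDelay", "DepDelay"])
    # only ArrDelay is filled; DepDelay keeps its null
    df.na.fill(0, subset=["ArrDelay"]).show()
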
diff --git a/integration-tests/examples/test_templates/jupyter/template_visualization_pyspark.ipynb b/integration-tests/examples/test_templates/jupyter/template_visualization_pyspark.ipynb
new file mode 100644
index 0000000..278c6c6
--- /dev/null
+++ b/integration-tests/examples/test_templates/jupyter/template_visualization_pyspark.ipynb
@@ -0,0 +1,243 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "from pyspark.sql import SQLContext\n",
+    "from pyspark.sql import DataFrame\n",
+    "from pyspark.sql import Row\n",
+    "from pyspark.sql.types import *\n",
+    "import pandas as pd\n",
+    "import StringIO\n",
+    "import matplotlib.pyplot as plt\n",
+    "hc = sc._jsc.hadoopConfiguration()\n",
+    "hc.set(\"hive.execution.engine\", \"mr\")\n",
+    "\n",
+    "working_storage = 'WORKING_STORAGE'\n",
+    "output_directory = 'jupyter/py2'\n",
+    "protocol_name = 'PROTOCOL_NAME://'"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Load Carriers data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "carriers = sqlContext.read.parquet(protocol_name + working_storage + \"/\" + output_directory + \"/carriers\").cache()   \n",
+    "sqlContext.registerDataFrameAsTable(carriers, \"carriers\")\n",
+    "carriers.printSchema()\n",
+    "carriers.limit(20).toPandas()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Load Airports data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "airports = sqlContext.read.parquet(protocol_name + working_storage + \"/\" + output_directory + \"/airports\").cache()\n",
+    "sqlContext.registerDataFrameAsTable(airports, \"airports\")\n",
+    "airports.printSchema()\n",
+    "airports.limit(20).toPandas()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Load Flights data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
+   "source": [
+    "flights = sqlContext.read.parquet(protocol_name + working_storage + \"/\" + output_directory + \"/flights\").cache()\n",
+    "flights.printSchema()\n",
+    "sqlContext.registerDataFrameAsTable(flights, \"flights\")\n",
+    "flights.limit(10).toPandas()[[\"ArrDelay\",\"CarrierDelay\",\"CarrierDelayStr\",\"WeatherDelay\",\"WeatherDelayStr\",\"Distance\"]]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Taxonomy for ArrDelay, CarrierDelay, and Distance colums"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "flights.describe(\"ArrDelay\",\"CarrierDelay\",\"Distance\").toPandas()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Let's find the top 10 of the most unpunctual airlines"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
+   "source": [
+    "%matplotlib inline\n",
+    "import matplotlib \n",
+    "import matplotlib.pyplot as plt\n",
+    "matplotlib.style.use('ggplot')\n",
+    "\n",
+    "\n",
+    "delay = sqlContext.sql(\"select SUBSTR(c.description, 0, 15) as Carrier, WorkDayDelay, WeekendDelay from \\\n",
+    "   (select ceil( avg(f.ArrDelay + f.DepDelay) ) as WorkDayDelay, \\\n",
+    "   f.UniqueCarrier \\\n",
+    "   FROM flights f \\\n",
+    "         WHERE f.DayOfWeek < 6 \\\n",
+    "         GROUP BY f.UniqueCarrier ORDER BY WorkDayDelay desc limit 10) t \\\n",
+    "JOIN \\\n",
+    "  (select ceil( avg(f.ArrDelay + f.DepDelay) ) as WeekendDelay, \\\n",
+    "   f.UniqueCarrier \\\n",
+    "   FROM flights f \\\n",
+    "         WHERE f.DayOfWeek > 5 \\\n",
+    "         GROUP BY f.UniqueCarrier) t1 \\\n",
+    "ON t.UniqueCarrier = t1.UniqueCarrier \\\n",
+    "JOIN carriers c on t.UniqueCarrier = c.code order by WeekendDelay desc, WorkDayDelay desc \\\n",
+    "\").toPandas()\n",
+    "\n",
+    "color_range_days = [\"#2966FF\", \"#61F2FF\"]\n",
+    "delay[\"Average\"] = (delay.WorkDayDelay + delay.WeekendDelay) / 2\n",
+    "ax = delay.Average.plot(x='Carrier', linestyle='-', marker='o')\n",
+    "delay.plot(x='Carrier', y=['WorkDayDelay','WeekendDelay'], kind='bar', legend = True,  figsize=(12, 4), color=color_range_days, ax=ax);"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Number of flight performed by top companies"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
+   "source": [
+    "%matplotlib inline\n",
+    "import matplotlib \n",
+    "import matplotlib.pyplot as plt\n",
+    "matplotlib.style.use('ggplot')\n",
+    "\n",
+    "q = \"\"\"SELECT t.cnt as FlightsAmt, carriers.description as Carrier FROM (\n",
+    "            SELECT count(*) as cnt, flights.UniqueCarrier as carrier_code \n",
+    "                FROM flights GROUP BY flights.UniqueCarrier LIMIT 6) t \n",
+    "            LEFT JOIN carriers ON t.carrier_code = carriers.code\"\"\"\n",
+    "\n",
+    "topFlights = sqlContext.sql(q).toPandas()\n",
+    "topFlights.plot.pie(labels=topFlights[\"Carrier\"], autopct='%.2f', legend=False, y=\"FlightsAmt\", figsize=(12,12));"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## The average Flight Distance per Company"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "%matplotlib inline\n",
+    "import matplotlib \n",
+    "import matplotlib.pyplot as plt\n",
+    "matplotlib.style.use('ggplot')\n",
+    "\n",
+    "resultDistance = sqlContext.sql(\"SELECT SUBSTR(c.description, 0, 15) as Carrier, COUNT(Distance) AS Distance FROM flights f JOIN carriers c ON f.UniqueCarrier = c.code GROUP BY c.description ORDER BY distance DESC LIMIT 10\").toPandas()\n",
+    "\n",
+    "color_range =  [\"#2966FF\",\n",
+    " \t  \"#2E73FF\",\n",
+    " \t  \"#3380FF\",\n",
+    " \t  \"#388CFF\",\n",
+    " \t  \"#3D99FF\",\n",
+    " \t  \"#42A6FF\",\n",
+    " \t  \"#47B2FF\",\n",
+    " \t  \"#4CBFFF\",\n",
+    " \t  \"#52CCFF\",\n",
+    " \t  \"#57D9FF\",\n",
+    " \t  \"#5CE6FF\",\n",
+    " \t  \"#61F2FF\",\n",
+    "      \"#66FFFF\"]\n",
+    "\n",
+    "resultDistance.plot(x='Carrier',  y='Distance', kind='bar', color=color_range, legend = False);"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "KERNEL_NAME"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.13"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 1
+}
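
The "most unpunctual airlines" query above joins a weekday aggregate to a weekend aggregate per carrier. The same self-join can be expressed with the DataFrame API; a minimal sketch on toy rows (column names follow the dataset, everything else is assumed):

    from pyspark.sql import SparkSession, functions as F

    spark = SparkSession.builder.master("local[*]").getOrCreate()
    flights = spark.createDataFrame(
        [("AA", 3, 10, 5), ("AA", 6, 30, 10), ("UA", 2, 1, 1), ("UA", 7, 2, 2)],
        ["UniqueCarrier", "DayOfWeek", "ArrDelay", "DepDelay"])

    def avg_delay(df, alias):
        # ceil(avg(ArrDelay + DepDelay)) per carrier, as in the SQL above
        return (df.groupBy("UniqueCarrier")
                  .agg(F.ceil(F.avg(F.col("ArrDelay") + F.col("DepDelay"))).alias(alias)))

    workday = avg_delay(flights.filter("DayOfWeek < 6"), "WorkDayDelay")
    weekend = avg_delay(flights.filter("DayOfWeek > 5"), "WeekendDelay")
    workday.join(weekend, "UniqueCarrier").orderBy(F.desc("WeekendDelay")).show()
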
diff --git a/integration-tests/examples/test_templates/jupyter/template_visualization_sparkr.ipynb b/integration-tests/examples/test_templates/jupyter/template_visualization_sparkr.ipynb
new file mode 100644
index 0000000..10f09a7
--- /dev/null
+++ b/integration-tests/examples/test_templates/jupyter/template_visualization_sparkr.ipynb
@@ -0,0 +1,249 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "sparkR.session()\n",
+    "\n",
+    "full_path <- function(file_path) {\n",
+    "    working_storage <- \"WORKING_STORAGE\"\n",
+    "    output_directory <- \"jupyter/r\"\n",
+    "    protocol_name <- \"PROTOCOL_NAME://%s/%s/%s\"   \n",
+    "    sprintf(protocol_name, working_storage, output_directory, file_path)\n",
+    "}"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Load Carriers data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "carriers <- read.df(full_path(\"carriers\"), \"parquet\")\n",
+    "createOrReplaceTempView(carriers, \"carriers\")\n",
+    "printSchema(carriers)\n",
+    "head(carriers, 20)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Load Airports data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "airports <- read.df(full_path(\"airports\"), \"parquet\")\n",
+    "createOrReplaceTempView(airports, \"airports\")\n",
+    "printSchema(airports)\n",
+    "head(airports, 20)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Load Flights data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "flights <- read.df(full_path(\"flights\"), \"parquet\")\n",
+    "createOrReplaceTempView(flights, \"flights\")\n",
+    "printSchema(flights)\n",
+    "head(flights, 10)[c(\"ArrDelay\",\"CarrierDelay\",\"WeatherDelay\",\"Distance\")]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Taxonomy for ArrDelay, CarrierDelay, and Distance colums"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "head(summary(limit(flights,1000)))[c(\"summary\", \"ArrDelay\",\"CarrierDelay\",\"Distance\")]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Let's find the top 10 of the most unpunctual airlines"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "library(ggplot2)\n",
+    "library(reshape2)\n",
+    "\n",
+    "delay_sql <- sql(\"\n",
+    "SELECT SUBSTR(c.description, 0, 15) as Carrier, WorkDayDelay, WeekendDelay \n",
+    "FROM\n",
+    "       (SELECT CEIL( AVG(f.ArrDelay + f.DepDelay) ) as WorkDayDelay, f.UniqueCarrier\n",
+    "        FROM flights f\n",
+    "        WHERE f.DayOfWeek < 6\n",
+    "        GROUP BY f.UniqueCarrier \n",
+    "        ORDER BY WorkDayDelay desc \n",
+    "        LIMIT 10) t\n",
+    "    JOIN\n",
+    "       (SELECT CEIL( AVG(f.ArrDelay + f.DepDelay) ) as WeekendDelay, f.UniqueCarrier\n",
+    "        FROM flights f\n",
+    "        WHERE f.DayOfWeek > 5\n",
+    "        GROUP BY f.UniqueCarrier) t1\n",
+    "      ON t.UniqueCarrier = t1.UniqueCarrier\n",
+    "    JOIN carriers c \n",
+    "      ON t.UniqueCarrier = c.code \n",
+    "ORDER BY WeekendDelay DESC, WorkDayDelay DESC\n",
+    "\")\n",
+    "\n",
+    "delay <- collect(delay_sql)\n",
+    "delay_melt <- melt(delay[c('Carrier', 'WorkDayDelay', 'WeekendDelay')])\n",
+    "\n",
+    "color_range_days <- c(\"#2966FF\", \"#61F2FF\")\n",
+    "\n",
+    "ggplot(data=delay_melt, aes(x=Carrier, y=value, fill=variable)) +\n",
+    "    geom_bar(stat=\"identity\", width=.7, position=\"dodge\") +\n",
+    "    stat_summary(fun.y=mean, geom = \"line\", mapping = aes(group = 1), color=\"red\") +\n",
+    "    stat_summary(fun.y=mean, geom = \"point\", mapping = aes(group = 1), color=\"red\") +\n",
+    "    theme(legend.position=\"right\", axis.text.x=element_text(angle=90)) +\n",
+    "    labs(x=\"Carrier\", y=\"Minutes\", fill=\"Day Type\") +\n",
+    "    coord_fixed(ratio = .2) +\n",
+    "    scale_fill_manual(values=color_range_days) +\n",
+    "    scale_y_continuous(breaks=seq(0, 30, 5))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Number of flight performed by top companies"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
+   "source": [
+    "top_flights_sql <- sql(\"\n",
+    "SELECT t.cnt as FlightsAmt, carriers.description as Carrier \n",
+    "FROM (\n",
+    "    SELECT count(*) as cnt, flights.UniqueCarrier as carrier_code \n",
+    "    FROM flights \n",
+    "    GROUP BY flights.UniqueCarrier LIMIT 6) t \n",
+    "LEFT JOIN carriers \n",
+    "  ON t.carrier_code = carriers.code\n",
+    "\")\n",
+    "\n",
+    "top_flights <- collect(top_flights_sql)\n",
+    "\n",
+    "ggplot(transform(transform(top_flights, value=FlightsAmt/sum(FlightsAmt)), labPos=cumsum(FlightsAmt)-FlightsAmt/2), \n",
+    "       aes(x=\"\", y = FlightsAmt, fill = Carrier)) +\n",
+    "    geom_bar(width = 1, stat = \"identity\") +\n",
+    "    coord_polar(\"y\", start=0) +\n",
+    "    scale_fill_brewer(palette=\"Dark2\") +\n",
+    "    theme_bw() +\n",
+    "    theme(axis.text.x=element_blank() ,panel.grid.major=element_blank(),panel.grid.minor = element_blank(),panel.border = element_blank()) +\n",
+    "    geom_text(size=4, aes(y=labPos, label=scales::percent(value))) + \n",
+    "    geom_text(size=3, aes(x=1.8, y=labPos, label=top_flights$Carrier)) + \n",
+    "    theme(legend.position=\"none\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## The average Flight Distance per Company"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "distance_sql = sql(\"\n",
+    "SELECT SUBSTR(c.description, 0, 15) as Carrier, COUNT(Distance) AS Distance \n",
+    "FROM flights f \n",
+    "JOIN carriers c \n",
+    "  ON f.UniqueCarrier = c.code \n",
+    "GROUP BY c.description \n",
+    "ORDER BY distance DESC \n",
+    "LIMIT 10\n",
+    "\")\n",
+    "\n",
+    "distance <- collect(distance_sql)\n",
+    "\n",
+    "distance$Carrier <- factor(distance$Carrier, levels = distance$Carrier[order(-distance$Distance)])\n",
+    "\n",
+    "color_range <-  c(\"#2966FF\", \"#2E73FF\",\"#3380FF\", \"#388CFF\", \"#3D99FF\", \"#42A6FF\", \"#47B2FF\", \"#4CBFFF\", \"#52CCFF\", \n",
+    "                  \"#57D9FF\", \"#5CE6FF\", \"#61F2FF\", \"#66FFFF\")\n",
+    "\n",
+    "ggplot(data=distance, aes(x=Carrier, y=Distance, fill=Carrier)) +\n",
+    "    geom_bar(stat=\"identity\", width=.7, position=\"dodge\") +\n",
+    "    theme(axis.text.x=element_text(angle=90)) +\n",
+    "    scale_fill_manual(values=color_range) +\n",
+    "    theme(legend.position=\"none\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Local SparkR (R-3.4.2, Spark-2.1.0)",
+   "language": "R",
+   "name": "KERNEL_NAME"
+  },
+  "language_info": {
+   "codemirror_mode": "r",
+   "file_extension": ".r",
+   "mimetype": "text/x-r-source",
+   "name": "R",
+   "pygments_lexer": "r",
+   "version": "3.4.2"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 1
+}
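
The ggplot cell draws grouped bars for workday versus weekend delay with a mean line on top. For readers following the pyspark notebooks instead, a hedged matplotlib equivalent on made-up numbers:

    import numpy as np
    import matplotlib.pyplot as plt

    carriers = ["AA", "UA", "DL"]          # illustrative values only
    workday = [18, 14, 11]
    weekend = [22, 12, 13]

    x = np.arange(len(carriers))
    plt.bar(x - 0.2, workday, width=0.4, label="WorkDayDelay")
    plt.bar(x + 0.2, weekend, width=0.4, label="WeekendDelay")
    plt.plot(x, [(w + e) / 2.0 for w, e in zip(workday, weekend)], "ro-", label="Average")
    plt.xticks(x, carriers)
    plt.ylabel("Minutes")
    plt.legend()
    plt.show()
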
diff --git a/integration-tests/examples/test_templates/rstudio/template_preparation.r b/integration-tests/examples/test_templates/rstudio/template_preparation.r
new file mode 100644
index 0000000..0c4377e
--- /dev/null
+++ b/integration-tests/examples/test_templates/rstudio/template_preparation.r
@@ -0,0 +1,44 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+sc <- sparkR.session(MASTER)
+
+working_storage <- "WORKING_STORAGE"
+output_directory <- "rstudio"
+protocol_name <- "PROTOCOL_NAME"
+
+bucket_path <- function(file_path) {
+    sprintf("%s://%s/rstudio_dataset/%s", protocol_name, working_storage, file_path)
+}
+
+full_path <- function(file_path) {
+    sprintf("%s://%s/%s/%s", protocol_name, working_storage, output_directory, file_path)
+}
+
+carriers <- read.df(bucket_path("carriers.csv"), "csv", header="true", inferSchema="true")
+write.df(carriers, path=full_path("carriers"), source="parquet", mode="overwrite")
+createOrReplaceTempView(carriers, "carriers")
+
+airports <- read.df(bucket_path("airports.csv"), "csv", header="true", inferSchema="true")
+write.df(airports, path=full_path("airports"), source="parquet", mode="overwrite")
+createOrReplaceTempView(airports, "airports")
+
+flights_w_na <- read.df(bucket_path("2008.csv.bz2"), "csv", header="true", inferSchema="true")
+flights <- fillna(flights_w_na, 0, cols=colnames(flights_w_na)[c(15, 16, 25:29)])
+write.df(flights, path=full_path("flights"), source="parquet", mode="overwrite")
+createOrReplaceTempView(flights, "flights")
+colnames(flights)
diff --git a/integration-tests/examples/test_templates/rstudio/template_visualization.r b/integration-tests/examples/test_templates/rstudio/template_visualization.r
new file mode 100644
index 0000000..e79f460
--- /dev/null
+++ b/integration-tests/examples/test_templates/rstudio/template_visualization.r
@@ -0,0 +1,122 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+sc <- sparkR.session(MASTER)
+
+full_path <- function(file_path) {
+    working_storage <- "WORKING_STORAGE"
+    output_directory <- "rstudio"
+    protocol_name <- "PROTOCOL_NAME"
+    sprintf("%s://%s/%s/%s", protocol_name, working_storage, output_directory, file_path)
+}
+
+carriers <- read.df(full_path("carriers"), "parquet")
+createOrReplaceTempView(carriers, "carriers")
+printSchema(carriers)
+
+airports <- read.df(full_path("airports"), "parquet")
+createOrReplaceTempView(airports, "airports")
+printSchema(airports)
+
+flights <- read.df(full_path("flights"), "parquet")
+createOrReplaceTempView(flights, "flights")
+printSchema(flights)
+
+library(ggplot2)
+library(reshape2)
+
+delay_sql <- sql("
+SELECT SUBSTR(c.description, 0, 15) as Carrier, WorkDayDelay, WeekendDelay
+FROM
+       (SELECT CEIL( AVG(f.ArrDelay + f.DepDelay) ) as WorkDayDelay, f.UniqueCarrier
+        FROM flights f
+        WHERE f.DayOfWeek < 6
+        GROUP BY f.UniqueCarrier
+        ORDER BY WorkDayDelay desc
+        LIMIT 10) t
+    JOIN
+       (SELECT CEIL( AVG(f.ArrDelay + f.DepDelay) ) as WeekendDelay, f.UniqueCarrier
+        FROM flights f
+        WHERE f.DayOfWeek > 5
+        GROUP BY f.UniqueCarrier) t1
+      ON t.UniqueCarrier = t1.UniqueCarrier
+    JOIN carriers c
+      ON t.UniqueCarrier = c.code
+ORDER BY WeekendDelay DESC, WorkDayDelay DESC
+")
+
+delay <- collect(delay_sql)
+delay_melt <- melt(delay[c('Carrier', 'WorkDayDelay', 'WeekendDelay')])
+
+color_range_days <- c("#2966FF", "#61F2FF")
+
+ggplot(data=delay_melt, aes(x=Carrier, y=value, fill=variable)) +
+    geom_bar(stat="identity", width=.7, position="dodge") +
+    stat_summary(fun.y=mean, geom = "line", mapping = aes(group = 1), color="red") +
+    stat_summary(fun.y=mean, geom = "point", mapping = aes(group = 1), color="red") +
+    theme(legend.position="right", axis.text.x=element_text(angle=90)) +
+    labs(x="Carrier", y="Minutes", fill="Day Type") +
+    coord_fixed(ratio = .2) +
+    scale_fill_manual(values=color_range_days) +
+    scale_y_continuous(breaks=seq(0, 30, 5))
+
+top_flights_sql <- sql("
+SELECT t.cnt as FlightsAmt, carriers.description as Carrier
+FROM (
+    SELECT count(*) as cnt, flights.UniqueCarrier as carrier_code
+    FROM flights
+    GROUP BY flights.UniqueCarrier LIMIT 6) t
+LEFT JOIN carriers
+  ON t.carrier_code = carriers.code
+")
+
+top_flights <- collect(top_flights_sql)
+
+ggplot(transform(transform(top_flights, value=FlightsAmt/sum(FlightsAmt)), labPos=cumsum(FlightsAmt)-FlightsAmt/2),
+       aes(x="", y = FlightsAmt, fill = Carrier)) +
+    geom_bar(width = 1, stat = "identity") +
+    coord_polar("y", start=0) +
+    scale_fill_brewer(palette="Dark2") +
+    theme_bw() +
+    theme(axis.text.x=element_blank() ,panel.grid.major=element_blank(),panel.grid.minor = element_blank(),panel.border = element_blank()) +
+    geom_text(size=4, aes(y=labPos, label=scales::percent(value))) +
+    geom_text(size=3, aes(x=1.8, y=labPos, label=top_flights$Carrier)) +
+    theme(legend.position="none")
+
+distance_sql = sql("
+SELECT SUBSTR(c.description, 0, 15) as Carrier, COUNT(Distance) AS Distance
+FROM flights f
+JOIN carriers c
+  ON f.UniqueCarrier = c.code
+GROUP BY c.description
+ORDER BY distance DESC
+LIMIT 10
+")
+
+distance <- collect(distance_sql)
+
+distance$Carrier <- factor(distance$Carrier, levels = distance$Carrier[order(-distance$Distance)])
+
+color_range <-  c("#2966FF", "#2E73FF","#3380FF", "#388CFF", "#3D99FF", "#42A6FF", "#47B2FF", "#4CBFFF", "#52CCFF",
+                  "#57D9FF", "#5CE6FF", "#61F2FF", "#66FFFF")
+
+ggplot(data=distance, aes(x=Carrier, y=Distance, fill=Carrier)) +
+    geom_bar(stat="identity", width=.7, position="dodge") +
+    theme(axis.text.x=element_text(angle=90)) +
+    scale_fill_manual(values=color_range) +
+    theme(legend.position="none")
+
diff --git a/integration-tests/examples/test_templates/tensor/template_preparation_tensor.ipynb b/integration-tests/examples/test_templates/tensor/template_preparation_tensor.ipynb
new file mode 100644
index 0000000..b1eadda
--- /dev/null
+++ b/integration-tests/examples/test_templates/tensor/template_preparation_tensor.ipynb
@@ -0,0 +1,285 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os, cv2, random\n",
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "%matplotlib inline \n",
+    "from keras.models import Sequential, load_model\n",
+    "from keras.layers import Dropout, Flatten, Convolution2D, MaxPooling2D, Dense, Activation\n",
+    "from keras.optimizers import Adam\n",
+    "from keras.callbacks import Callback, EarlyStopping\n",
+    "from keras.callbacks import BaseLogger, TensorBoard"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Constants definition"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "TRAIN_DIR = '/home/datalab-user/train/'\n",
+    "TEST_DIR = '/home/datalab-user/test/'\n",
+    "ROWS = 128\n",
+    "COLS = 128\n",
+    "CHANNELS = 3\n",
+    "TRAIN_IMAGES_COUNT = 1000\n",
+    "PATH_TO_LOGS = '/home/datalab-user/logs'"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Reading and adjusting images for training"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "all_images = [TRAIN_DIR+i for i in os.listdir(TRAIN_DIR)[:TRAIN_IMAGES_COUNT]]\n",
+    "test_images =  [TEST_DIR+i for i in os.listdir(TEST_DIR)]\n",
+    "random.shuffle(all_images)\n",
+    "test_coeff = int(len (all_images) * .9)\n",
+    "\n",
+    "train_images, test_images = all_images[:test_coeff], all_images[test_coeff:]\n",
+    "\n",
+    "def read_image(file_path):\n",
+    "    img = cv2.imread(file_path, cv2.IMREAD_COLOR)\n",
+    "    return cv2.resize(img, (ROWS, COLS), interpolation=cv2.INTER_CUBIC).reshape(ROWS, COLS, CHANNELS)\n",
+    "\n",
+    "def prepare_data(images):\n",
+    "    count = len(images)\n",
+    "    data = np.ndarray((count, ROWS, COLS, CHANNELS), dtype=np.uint8)\n",
+    "\n",
+    "    for i, image_file in enumerate(images):\n",
+    "        image = read_image(image_file)\n",
+    "        data[i] = image#.T\n",
+    "    return data\n",
+    "\n",
+    "train = prepare_data(train_images)\n",
+    "test = prepare_data(test_images)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Image counts"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(\"Train shape: {}\".format(train.shape))\n",
+    "print(\"Test shape: {}\".format(test.shape))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Assigning labels to training images"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "labels = []\n",
+    "for i in train_images:\n",
+    "    if 'dog' in i.split(\"/\")[-1] :\n",
+    "        labels.append(1)\n",
+    "    else:\n",
+    "        labels.append(0)\n",
+    "        \n",
+    "labels_test = []\n",
+    "for i in test_images:\n",
+    "    if 'dog' in i.split(\"/\")[-1] :\n",
+    "        labels_test.append(1)\n",
+    "    else:\n",
+    "        labels_test.append(0)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Building a convnet"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "optimizer = Adam(lr=1e-6)\n",
+    "objective = 'binary_crossentropy'\n",
+    "\n",
+    "def build_model():\n",
+    "    \n",
+    "    model = Sequential()\n",
+    "\n",
+    "    model.add(Convolution2D(32, 3, 3, border_mode='same', input_shape=(ROWS, COLS, 3), activation='relu'))\n",
+    "    model.add(Convolution2D(32, 3, 3, border_mode='same', activation='relu'))\n",
+    "    model.add(MaxPooling2D(pool_size=(2, 2)))\n",
+    "\n",
+    "    model.add(Convolution2D(64, 3, 3, border_mode='same', activation='relu'))\n",
+    "    model.add(Convolution2D(64, 3, 3, border_mode='same', activation='relu'))\n",
+    "    model.add(MaxPooling2D(pool_size=(2, 2)))\n",
+    "    \n",
+    "    model.add(Convolution2D(128, 3, 3, border_mode='same', activation='relu'))\n",
+    "    model.add(Convolution2D(128, 3, 3, border_mode='same', activation='relu'))\n",
+    "    model.add(MaxPooling2D(pool_size=(2, 2)))\n",
+    "\n",
+    "    model.add(Flatten())\n",
+    "    model.add(Dense(256, activation='relu'))\n",
+    "    model.add(Dropout(0.5))\n",
+    "    \n",
+    "    model.add(Dense(1))\n",
+    "    model.add(Activation('sigmoid'))\n",
+    "    \n",
+    "    model.compile(loss=objective, optimizer=optimizer, metrics=['accuracy'])\n",
+    "    return model\n",
+    "\n",
+    "\n",
+    "model = build_model()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Training the model"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "This block takes about 2.5-3 hours to execute if training on whole dataset of 22500 images"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "nb_epoch = 10\n",
+    "batch_size = 16\n",
+    "\n",
+    "class LossHistory(Callback):\n",
+    "    def on_train_begin(self, logs={}):\n",
+    "        self.losses = []\n",
+    "        self.val_losses = []\n",
+    "        \n",
+    "    def on_epoch_end(self, batch, logs={}):\n",
+    "        self.losses.append(logs.get('loss'))\n",
+    "        self.val_losses.append(logs.get('val_loss'))\n",
+    "\n",
+    "early_stopping = EarlyStopping(monitor='val_loss', patience=5, verbose=1, mode='auto')        \n",
+    "        \n",
+    "def train_and_test_model():\n",
+    "    history = LossHistory()\n",
+    "    tensorboard = TensorBoard(log_dir=PATH_TO_LOGS)\n",
+    "    model.fit(train, labels, batch_size=batch_size, nb_epoch=nb_epoch,\n",
+    "              validation_split=0.25, verbose=2, shuffle=True, callbacks=[history, early_stopping, tensorboard])\n",
+    "    \n",
+    "\n",
+    "    predictions = model.predict(test, verbose=2)\n",
+    "    return predictions, history\n",
+    "\n",
+    "predictions, history = train_and_test_model()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Saving the model and weights"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "path_to_model = '/home/datalab-user/model_1000.json'\n",
+    "path_to_weights = '/home/datalab-user/weigths_1000.h5'\n",
+    "\n",
+    "model.save(path_to_model)\n",
+    "model.save_weights(path_to_weights)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Plotting learning curves"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "loss = history.losses\n",
+    "val_loss = history.val_losses\n",
+    "\n",
+    "plt.xlabel('Epochs')\n",
+    "plt.ylabel('Loss')\n",
+    "plt.title('VGG-16 Loss Trend')\n",
+    "plt.plot(loss, 'blue', label='Training Loss')\n",
+    "plt.plot(val_loss, 'green', label='Validation Loss')\n",
+    "plt.xticks(range(0,len(loss))[0::2])\n",
+    "plt.legend()\n",
+    "plt.show()"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "KERNEL_NAME"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.13"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
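
The tensor preparation notebook derives binary labels from file names ("dog" in the basename means 1). The two labeling loops above reduce to a single comprehension; a compact sketch on hypothetical paths:

    paths = ["/home/datalab-user/train/dog.001.jpg",    # hypothetical file names
             "/home/datalab-user/train/cat.002.jpg"]
    labels = [1 if "dog" in p.split("/")[-1] else 0 for p in paths]
    print(labels)  # [1, 0]
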
diff --git a/integration-tests/examples/test_templates/tensor/template_visualization_tensor.ipynb b/integration-tests/examples/test_templates/tensor/template_visualization_tensor.ipynb
new file mode 100644
index 0000000..c233ab2
--- /dev/null
+++ b/integration-tests/examples/test_templates/tensor/template_visualization_tensor.ipynb
@@ -0,0 +1,139 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os, cv2\n",
+    "from keras.models import load_model\n",
+    "import matplotlib.pyplot as plt\n",
+    "%matplotlib inline\n",
+    "import numpy as np\n",
+    "import tensorflow as tf"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Pathes to model and weights"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "path_to_model = '/home/datalab-user/model_1000.json'\n",
+    "path_to_weights = '/home/datalab-user/weigths_1000.h5'"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Loading test images"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "ROWS = 128\n",
+    "COLS = 128\n",
+    "CHANNELS = 3\n",
+    "TEST_DIR = '/home/datalab-user/test/'\n",
+    "all_image_names = os.listdir(TEST_DIR)\n",
+    "all_image_names.sort()\n",
+    "test_images =  [TEST_DIR+i for i in all_image_names[6:11] + all_image_names[19:32] + all_image_names[33:34]]\n",
+    "\n",
+    "def read_image(file_path):\n",
+    "    img = cv2.imread(file_path, cv2.IMREAD_COLOR)\n",
+    "    return cv2.resize(img, (ROWS, COLS), interpolation=cv2.INTER_CUBIC).reshape(ROWS, COLS, CHANNELS)\n",
+    "\n",
+    "def prep_data(images):\n",
+    "    count = len(images)\n",
+    "    data = np.ndarray((count, ROWS, COLS, CHANNELS), dtype=np.uint8)\n",
+    "\n",
+    "    for i, image_file in enumerate(images):\n",
+    "        image = read_image(image_file)\n",
+    "        data[i] = image\n",
+    "    return data\n",
+    "test = prep_data(test_images)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Loading the model and making predictions on test data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "with tf.device('/cpu:0'):\n",
+    "    model = load_model(path_to_model)\n",
+    "    model.load_weights(path_to_weights)\n",
+    "    predictions = model.predict(test, verbose=2)     "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Visualizing results (rendering can take about a minute)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
+   "source": [
+    "plt.figure(figsize=(16, 12))\n",
+    "for i in range(0, 12):\n",
+    "    plt.subplot(3, 4, i+1)\n",
+    "    if predictions[i, 0] >= 0.5: \n",
+    "        plt.title('{:.2%} Dog'.format(predictions[i][0]))\n",
+    "    else: \n",
+    "        plt.title('{:.2%} Cat'.format(1-predictions[i][0]))\n",
+    "        \n",
+    "    plt.imshow(cv2.cvtColor(test[i], cv2.COLOR_BGR2RGB))\n",
+    "    plt.axis('off')\n",
+    "plt.show()"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "KERNEL_NAME"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.13"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
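
The plotting cell classifies each sigmoid output with a 0.5 threshold and formats the confidence accordingly. The decision rule in isolation, on dummy predictions:

    predictions = [[0.91], [0.12]]   # dummy sigmoid outputs, one per image
    for p in predictions:
        if p[0] >= 0.5:
            print('{:.2%} Dog'.format(p[0]))
        else:
            print('{:.2%} Cat'.format(1 - p[0]))
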
diff --git a/integration-tests/examples/test_templates/zeppelin/template_preparation_pyspark.json b/integration-tests/examples/test_templates/zeppelin/template_preparation_pyspark.json
new file mode 100644
index 0000000..abf99f9
--- /dev/null
+++ b/integration-tests/examples/test_templates/zeppelin/template_preparation_pyspark.json
@@ -0,0 +1 @@
+{"paragraphs":[{"title":"Init Spark","text":"%INTERPRETER_NAME\nfrom pyspark.sql import SQLContext\nfrom pyspark.sql import DataFrame\nfrom pyspark.sql import Row\nfrom pyspark.sql.types import *\nimport pandas as pd\nimport StringIO\nimport matplotlib.pyplot as plt\nhc = sc._jsc.hadoopConfiguration()\nhc.set(\"hive.execution.engine\", \"mr\")","dateUpdated":"2018-01-03T14:13:24+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":6,"editorMode":"ace/mod [...]
\ No newline at end of file
diff --git a/integration-tests/examples/test_templates/zeppelin/template_preparation_spark.json b/integration-tests/examples/test_templates/zeppelin/template_preparation_spark.json
new file mode 100644
index 0000000..7d8e7ec
--- /dev/null
+++ b/integration-tests/examples/test_templates/zeppelin/template_preparation_spark.json
@@ -0,0 +1 @@
+{"paragraphs":[{"title":"Initialize. Connect to bucket","text":"%INTERPRETER_NAME\r\nimport org.apache.spark.sql._\r\n\r\nval workingStorage = \"WORKING_STORAGE\"\r\nval output_directory = \"zeppelin/scala\"\r\nval protocolName = \"PROTOCOL_NAME\"\r\nval sqlCtx = new SQLContext(sc)\r\nval hc = sc.hadoopConfiguration\r\nhc.set(\"hive.execution.engine\", \"mr\")\r\n","dateUpdated":"2018-01-03T14:29:14+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":12 [...]
\ No newline at end of file
diff --git a/integration-tests/examples/test_templates/zeppelin/template_preparation_sparkr.json b/integration-tests/examples/test_templates/zeppelin/template_preparation_sparkr.json
new file mode 100644
index 0000000..367bd74
--- /dev/null
+++ b/integration-tests/examples/test_templates/zeppelin/template_preparation_sparkr.json
@@ -0,0 +1 @@
+{"paragraphs":[{"text":"%INTERPRETER_NAME\nsparkR.session()\n\nworking_storage <- \"WORKING_STORAGE\"\noutput_directory <- \"zeppelin/r\"\nprotocol_name <- 'PROTOCOL_NAME'\nfull_path <- function(file_path) {\n    sprintf('%s://%s/%s/%s', protocol_name, working_storage, output_directory, file_path)\n}\nbucket_path <- function(file_path) {\n    sprintf('%s://%s/zeppelin_dataset/%s', protocol_name, working_storage, file_path)\n}\ncarriers <- read.df(bucket_path(\"carriers.csv\"), \"csv\",  [...]
\ No newline at end of file
diff --git a/integration-tests/examples/test_templates/zeppelin/template_visualization_pyspark.json b/integration-tests/examples/test_templates/zeppelin/template_visualization_pyspark.json
new file mode 100644
index 0000000..b363748
--- /dev/null
+++ b/integration-tests/examples/test_templates/zeppelin/template_visualization_pyspark.json
@@ -0,0 +1 @@
+{"paragraphs":[{"text":"%INTERPRETER_NAME\r\n\r\nfrom pyspark.sql import SQLContext\r\nfrom pyspark.sql import DataFrame\r\nfrom pyspark.sql import Row\r\nfrom pyspark.sql.types import *\r\nimport pandas as pd\r\nimport StringIO\r\nimport matplotlib\r\nmatplotlib.style.use('ggplot')\r\nimport matplotlib.pyplot as plt\r\nplt.switch_backend('WebAgg')\r\nimport os\r\nos.system(\"export DISPLAY=:0\")\r\nhc = sc._jsc.hadoopConfiguration()\r\nhc.set(\"hive.execution.engine\", \"mr\")\r\n\r\n\ [...]
\ No newline at end of file
diff --git a/integration-tests/examples/test_templates/zeppelin/template_visualization_sparkr.json b/integration-tests/examples/test_templates/zeppelin/template_visualization_sparkr.json
new file mode 100644
index 0000000..62c1400
--- /dev/null
+++ b/integration-tests/examples/test_templates/zeppelin/template_visualization_sparkr.json
@@ -0,0 +1 @@
+{"paragraphs":[{"text":"%INTERPRETER_NAME\nsparkR.session()\n\nfull_path <- function(file_path) {\n    working_storage <- \"WORKING_STORAGE\"\n    output_directory <- \"zeppelin/r\"\n    protocol_name <- 'PROTOCOL_NAME'\n    sprintf('%s://%s/%s/%s', protocol_name, working_storage, output_directory, file_path)\n}","dateUpdated":"2018-01-04T09:24:59+0000","config":{"colWidth":12,"editorMode":"ace/mode/text","results":{},"enabled":true,"editorSetting":{"language":"text","editOnDblClick":fa [...]
\ No newline at end of file
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/cloud/VirtualMachineStatusChecker.java b/integration-tests/src/main/java/com/epam/datalab/automation/cloud/VirtualMachineStatusChecker.java
new file mode 100644
index 0000000..af5b0ea
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/cloud/VirtualMachineStatusChecker.java
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.cloud;
+
+import com.epam.datalab.automation.cloud.aws.AmazonHelper;
+import com.epam.datalab.automation.cloud.aws.AmazonInstanceState;
+import com.epam.datalab.automation.cloud.azure.AzureHelper;
+import com.epam.datalab.automation.cloud.gcp.GcpHelper;
+import com.epam.datalab.automation.cloud.gcp.GcpInstanceState;
+import com.epam.datalab.automation.helper.CloudProvider;
+import com.epam.datalab.automation.helper.ConfigPropertyValue;
+import com.microsoft.azure.management.compute.PowerState;
+import org.testng.Assert;
+
+import java.io.IOException;
+
+public class VirtualMachineStatusChecker {
+
+	private static final String UNKNOWN_CLOUD_PROVIDER = "Unknown cloud provider";
+
+	private VirtualMachineStatusChecker(){}
+
+    public static void checkIfRunning(String tagNameValue, boolean restrictionMode)
+			throws InterruptedException, IOException {
+
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+                AmazonHelper.checkAmazonStatus(tagNameValue, AmazonInstanceState.RUNNING);
+                break;
+            case CloudProvider.AZURE_PROVIDER:
+                AzureHelper.checkAzureStatus(tagNameValue, PowerState.RUNNING, restrictionMode);
+                break;
+            case CloudProvider.GCP_PROVIDER:
+                GcpHelper.checkGcpStatus(tagNameValue, ConfigPropertyValue.getGcpDataLabProjectId(),
+                        GcpInstanceState.RUNNING, restrictionMode,
+                        GcpHelper.getAvailableZonesForProject(ConfigPropertyValue.getGcpDataLabProjectId()));
+                break;
+            default:
+                Assert.fail(UNKNOWN_CLOUD_PROVIDER);
+        }
+
+    }
+
+    public static void checkIfTerminated(String tagNameValue, boolean restrictionMode)
+			throws InterruptedException, IOException {
+
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+                AmazonHelper.checkAmazonStatus(tagNameValue, AmazonInstanceState.TERMINATED);
+                break;
+            case CloudProvider.AZURE_PROVIDER:
+                AzureHelper.checkAzureStatus(tagNameValue, PowerState.STOPPED, restrictionMode);
+                break;
+            case CloudProvider.GCP_PROVIDER:
+                GcpHelper.checkGcpStatus(tagNameValue, ConfigPropertyValue.getGcpDataLabProjectId(),
+                        GcpInstanceState.TERMINATED, restrictionMode,
+                        GcpHelper.getAvailableZonesForProject(ConfigPropertyValue.getGcpDataLabProjectId()));
+                break;
+            default:
+                Assert.fail(UNKNOWN_CLOUD_PROVIDER);
+        }
+
+    }
+
+    public static void checkIfStopped(String tagNameValue, boolean restrictionMode)
+			throws InterruptedException, IOException {
+
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+                AmazonHelper.checkAmazonStatus(tagNameValue, AmazonInstanceState.STOPPED);
+                break;
+            case CloudProvider.AZURE_PROVIDER:
+                AzureHelper.checkAzureStatus(tagNameValue, PowerState.DEALLOCATED, restrictionMode);
+                break;
+            case CloudProvider.GCP_PROVIDER:
+                GcpHelper.checkGcpStatus(tagNameValue, ConfigPropertyValue.getGcpDataLabProjectId(),
+                        GcpInstanceState.TERMINATED, restrictionMode,
+                        GcpHelper.getAvailableZonesForProject(ConfigPropertyValue.getGcpDataLabProjectId()));
+                break;
+            default:
+                Assert.fail(UNKNOWN_CLOUD_PROVIDER);
+        }
+
+    }
+
+    public static String getStartingStatus() {
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+                return AmazonInstanceState.STARTING.toString();
+            case CloudProvider.AZURE_PROVIDER:
+                return PowerState.STARTING.toString();
+            case CloudProvider.GCP_PROVIDER:
+				return GcpInstanceState.STARTING.toString();
+            default:
+                return "";
+        }
+
+    }
+
+    public static String getRunningStatus(){
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+                return AmazonInstanceState.RUNNING.toString();
+            case CloudProvider.AZURE_PROVIDER:
+                return PowerState.RUNNING.toString();
+            case CloudProvider.GCP_PROVIDER:
+                return GcpInstanceState.RUNNING.toString();
+            default:
+                return "";
+        }
+
+    }
+
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/cloud/aws/AmazonHelper.java b/integration-tests/src/main/java/com/epam/datalab/automation/cloud/aws/AmazonHelper.java
new file mode 100644
index 0000000..4235de9
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/cloud/aws/AmazonHelper.java
@@ -0,0 +1,153 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.cloud.aws;
+
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.regions.Region;
+import com.amazonaws.regions.Regions;
+import com.amazonaws.services.ec2.AmazonEC2;
+import com.amazonaws.services.ec2.AmazonEC2Client;
+import com.amazonaws.services.ec2.model.DescribeInstancesRequest;
+import com.amazonaws.services.ec2.model.DescribeInstancesResult;
+import com.amazonaws.services.ec2.model.Filter;
+import com.amazonaws.services.ec2.model.Instance;
+import com.amazonaws.services.ec2.model.Reservation;
+import com.amazonaws.services.ec2.model.Tag;
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.AmazonS3Client;
+import com.amazonaws.services.s3.model.AccessControlList;
+import com.amazonaws.services.s3.model.Grant;
+import com.epam.datalab.automation.exceptions.CloudException;
+import com.epam.datalab.automation.helper.ConfigPropertyValue;
+import com.epam.datalab.automation.helper.NamingHelper;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.testng.Assert;
+
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.List;
+
+public class AmazonHelper {
+
+    private static final Logger LOGGER = LogManager.getLogger(AmazonHelper.class);
+	private static final Duration CHECK_TIMEOUT = Duration.parse("PT10m");
+	private static final String LOCALHOST_IP = ConfigPropertyValue.get("LOCALHOST_IP");
+
+	private AmazonHelper(){}
+	
+	private static AWSCredentials getCredentials() {
+		return new BasicAWSCredentials(ConfigPropertyValue.getAwsAccessKeyId(), ConfigPropertyValue.getAwsSecretAccessKey());
+	}
+	
+	private static Region getRegion() {
+		return Region.getRegion(Regions.fromName(ConfigPropertyValue.getAwsRegion()));
+	}
+
+	private static List<Instance> getInstances(String instanceName) {
+		AWSCredentials credentials = getCredentials();
+		AmazonEC2 ec2 = new AmazonEC2Client(credentials);
+		ec2.setRegion(getRegion());
+
+		List<String> valuesT1 = new ArrayList<>();
+		valuesT1.add(instanceName + "*");
+		Filter filter = new Filter("tag:" + NamingHelper.getServiceBaseName() + "-tag", valuesT1);
+
+		DescribeInstancesRequest describeInstanceRequest = new DescribeInstancesRequest().withFilters(filter);
+		DescribeInstancesResult describeInstanceResult = ec2.describeInstances(describeInstanceRequest);
+
+		List<Reservation> reservations = describeInstanceResult.getReservations();
+
+		if (reservations.isEmpty()) {
+			throw new CloudException("Instance " + instanceName + " in Amazon not found");
+		}
+
+		List<Instance> instances = reservations.get(0).getInstances();
+		if (instances.isEmpty()) {
+			throw new CloudException("Instance " + instanceName + " in Amazon not found");
+		}
+
+		return instances;
+	}
+
+	public static Instance getInstance(String instanceName) {
+    	return (ConfigPropertyValue.isRunModeLocal() ?
+    			new Instance()
+            		.withPrivateDnsName("localhost")
+            		.withPrivateIpAddress(LOCALHOST_IP)
+            		.withPublicDnsName("localhost")
+            		.withPublicIpAddress(LOCALHOST_IP)
+            		.withTags(new Tag()
+            					.withKey("Name")
+            					.withValue(instanceName)) :
+            	getInstances(instanceName).get(0));
+    }
+
+	public static void checkAmazonStatus(String instanceName, AmazonInstanceState expAmazonState) throws
+			InterruptedException {
+        LOGGER.info("Check status of instance {} on Amazon: {}", instanceName);
+        if (ConfigPropertyValue.isRunModeLocal()) {
+        	LOGGER.info("Amazon instance {} fake state is {}", instanceName, expAmazonState);
+        	return;
+        }
+        
+        String instanceState;
+        long requestTimeout = ConfigPropertyValue.getAwsRequestTimeout().toMillis();
+    	long timeout = CHECK_TIMEOUT.toMillis();
+        long expiredTime = System.currentTimeMillis() + timeout;
+        Instance instance;
+        while (true) {
+        	instance = AmazonHelper.getInstance(instanceName);
+        	instanceState = instance.getState().getName();
+        	if (!instance.getState().getName().equals("shutting-down")) {
+        		break;
+        	}
+        	if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
+                LOGGER.info("Amazon instance {} state is {}", instanceName, instanceState);
+        		throw new CloudException("Timeout has been expired for check amazon instance " + instanceState);
+            }
+            Thread.sleep(requestTimeout);
+        }
+        
+        for (Instance i : AmazonHelper.getInstances(instanceName)) {
+            LOGGER.info("Amazon instance {} state is {}. Instance id {}, private IP {}, public IP {}",
+            		instanceName, instanceState, i.getInstanceId(), i.getPrivateIpAddress(), i.getPublicIpAddress());
+		}
+        Assert.assertEquals(instanceState, expAmazonState.toString(), "Amazon instance " + instanceName + " state is not correct. Instance id " +
+        		instance.getInstanceId() + ", private IP " + instance.getPrivateIpAddress() + ", public IP " + instance.getPublicIpAddress());
+    }
+
+    public static void printBucketGrants(String bucketName){
+        LOGGER.info("Print grants for bucket {} on Amazon: " , bucketName);
+        if (ConfigPropertyValue.isRunModeLocal()) {
+        	LOGGER.info("  action skipped when running in local mode");
+        	return;
+        }
+        AWSCredentials credentials = getCredentials();
+        AmazonS3 s3 = new AmazonS3Client(credentials);
+        
+        s3.setRegion(getRegion());
+        AccessControlList acl = s3.getBucketAcl(bucketName);
+        for (Grant grant : acl.getGrants()) {
+            LOGGER.info(grant);
+		}
+    }
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/cloud/aws/AmazonInstanceState.java b/integration-tests/src/main/java/com/epam/datalab/automation/cloud/aws/AmazonInstanceState.java
new file mode 100644
index 0000000..2d49bec
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/cloud/aws/AmazonInstanceState.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.cloud.aws;
+
+public enum AmazonInstanceState {
+    STARTING,
+    RUNNING,
+    TERMINATED,
+    STOPPED;
+	
+    @Override
+    public String toString() {
+    	return super.toString().toLowerCase();
+    }
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/cloud/azure/AzureHelper.java b/integration-tests/src/main/java/com/epam/datalab/automation/cloud/azure/AzureHelper.java
new file mode 100644
index 0000000..79c6654
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/cloud/azure/AzureHelper.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.cloud.azure;
+
+import com.epam.datalab.automation.exceptions.CloudException;
+import com.epam.datalab.automation.helper.ConfigPropertyValue;
+import com.microsoft.azure.management.Azure;
+import com.microsoft.azure.management.compute.PowerState;
+import com.microsoft.azure.management.compute.VirtualMachine;
+import com.microsoft.azure.management.network.NetworkInterface;
+import com.microsoft.azure.management.network.PublicIPAddress;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.testng.Assert;
+
+import java.io.File;
+import java.io.IOException;
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+
+public class AzureHelper{
+
+    private static final Logger LOGGER = LogManager.getLogger(AzureHelper.class);
+    private static final Duration CHECK_TIMEOUT = Duration.parse("PT10m");
+    private static final String LOCALHOST_IP = ConfigPropertyValue.get("LOCALHOST_IP");
+
+	private static Azure azure = getAzureInstance();
+
+    private AzureHelper(){}
+
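+	// Authenticates with the configured Azure auth file; no client is created in local run mode.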
+	private static Azure getAzureInstance() {
+		if (!ConfigPropertyValue.isRunModeLocal() && Objects.isNull(azure)) {
+			try {
+				return Azure.configure().authenticate(
+						new File(ConfigPropertyValue.getAzureAuthFileName())).withDefaultSubscription();
+			} catch (IOException e) {
+				LOGGER.error("An exception occurred", e);
+			}
+		}
+		return azure;
+	}
+
+    private static List<VirtualMachine> getVirtualMachines(){
+        List<VirtualMachine> virtualMachines = new ArrayList<>(azure.virtualMachines().list());
+        return !virtualMachines.isEmpty() ? virtualMachines : null;
+    }
+
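+    // restrictionMode == true keeps only exact name matches, otherwise substring matches suffice;
+    // in local run mode a mocked VM wired to localhost IPs is returned.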
+    public static List<VirtualMachine> getVirtualMachinesByName(String name, boolean restrictionMode){
+        if(ConfigPropertyValue.isRunModeLocal()){
+
+            List<VirtualMachine> vmLocalModeList = new ArrayList<>();
+            VirtualMachine mockedVM = mock(VirtualMachine.class);
+            PublicIPAddress mockedIPAddress = mock(PublicIPAddress.class);
+            NetworkInterface mockedNetworkInterface = mock(NetworkInterface.class);
+            when(mockedVM.getPrimaryPublicIPAddress()).thenReturn(mockedIPAddress);
+            when(mockedIPAddress.ipAddress()).thenReturn(LOCALHOST_IP);
+            when(mockedVM.getPrimaryNetworkInterface()).thenReturn(mockedNetworkInterface);
+            when(mockedNetworkInterface.primaryPrivateIP()).thenReturn(LOCALHOST_IP);
+            vmLocalModeList.add(mockedVM);
+
+            return vmLocalModeList;
+
+        }
+        List<VirtualMachine> vmList = getVirtualMachines();
+        if(vmList == null){
+            LOGGER.warn("There are no virtual machines in Azure");
+            return vmList;
+        }
+        if(restrictionMode){
+            vmList.removeIf(vm -> !hasName(vm, name));
+        }
+        else{
+            vmList.removeIf(vm -> !containsName(vm, name));
+        }
+        return !vmList.isEmpty() ? vmList : null;
+    }
+
+    private static boolean hasName(VirtualMachine vm, String name){
+        return vm.name().equals(name);
+    }
+
+    private static boolean containsName(VirtualMachine vm, String name){
+        return vm.name().contains(name);
+    }
+
+    private static PowerState getStatus(VirtualMachine vm){
+        return vm.powerState();
+    }
+
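+	// Waits until the first matching VM reports the expected power state or CHECK_TIMEOUT expires, then asserts it.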
+	public static void checkAzureStatus(String virtualMachineName, PowerState expAzureState, boolean restrictionMode)
+			throws InterruptedException {
+        LOGGER.info("Check status of virtual machine with name {} on Azure", virtualMachineName);
+        if (ConfigPropertyValue.isRunModeLocal()) {
+            LOGGER.info("Azure virtual machine with name {} fake state is {}", virtualMachineName, expAzureState);
+            return;
+        }
+        List<VirtualMachine> vmsWithName = getVirtualMachinesByName(virtualMachineName, restrictionMode);
+        if(vmsWithName == null){
+            LOGGER.warn("There is no virtual machine in Azure with name {}", virtualMachineName);
+            return;
+        }
+
+        PowerState virtualMachineState;
+        long requestTimeout = ConfigPropertyValue.getAzureRequestTimeout().toMillis();
+        long timeout = CHECK_TIMEOUT.toMillis();
+        long expiredTime = System.currentTimeMillis() + timeout;
+        VirtualMachine virtualMachine = vmsWithName.get(0);
+        while (true) {
+            virtualMachineState = getStatus(virtualMachine);
+            if (virtualMachineState == expAzureState) {
+                break;
+            }
+            if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
+                LOGGER.info("Azure virtual machine with name {} state is {}", virtualMachineName, getStatus(virtualMachine));
+                throw new CloudException("Timeout expired while waiting for state of Azure virtual machine with name " + virtualMachineName);
+            }
+            Thread.sleep(requestTimeout);
+        }
+
+        for (VirtualMachine  vm : vmsWithName) {
+            LOGGER.info("Azure virtual machine with name {} state is {}. Virtual machine id {}, private IP {}, public IP {}",
+                    virtualMachineName, getStatus(vm), vm.vmId(), vm.getPrimaryNetworkInterface().primaryPrivateIP(),
+                    vm.getPrimaryPublicIPAddress() != null ? vm.getPrimaryPublicIPAddress().ipAddress() : "doesn't exist for this resource type");
+        }
+        Assert.assertEquals(virtualMachineState, expAzureState, "Azure virtual machine with name " + virtualMachineName +
+                " state is not correct. Virtual machine id " +
+                virtualMachine.vmId() + ", private IP " + virtualMachine.getPrimaryNetworkInterface().primaryPrivateIP() +
+                ", public IP " +
+                (virtualMachine.getPrimaryPublicIPAddress() != null ? virtualMachine.getPrimaryPublicIPAddress().ipAddress() : "doesn't exist for this resource type" ));
+    }
+
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/cloud/gcp/GcpHelper.java b/integration-tests/src/main/java/com/epam/datalab/automation/cloud/gcp/GcpHelper.java
new file mode 100644
index 0000000..7d309ba
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/cloud/gcp/GcpHelper.java
@@ -0,0 +1,245 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.cloud.gcp;
+
+import com.epam.datalab.automation.exceptions.CloudException;
+import com.epam.datalab.automation.helper.ConfigPropertyValue;
+import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
+import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
+import com.google.api.client.http.HttpTransport;
+import com.google.api.client.json.JsonFactory;
+import com.google.api.client.json.jackson2.JacksonFactory;
+import com.google.api.services.compute.Compute;
+import com.google.api.services.compute.model.AccessConfig;
+import com.google.api.services.compute.model.Instance;
+import com.google.api.services.compute.model.InstanceList;
+import com.google.api.services.compute.model.NetworkInterface;
+import com.google.api.services.compute.model.Zone;
+import com.google.api.services.compute.model.ZoneList;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.testng.Assert;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.security.GeneralSecurityException;
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class GcpHelper {
+
+	private static final Logger LOGGER = LogManager.getLogger(GcpHelper.class);
+	private static final Duration CHECK_TIMEOUT = Duration.parse("PT10m");
+	private static final String LOCALHOST_IP = ConfigPropertyValue.get("LOCALHOST_IP");
+	private static final String NOT_EXIST = "doesn't exist for this resource type";
+
+	private GcpHelper() {
+	}
+
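+	// Lists instances across the given zones, following page tokens until each listing is exhausted.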
+	private static List<Instance> getInstances(String projectId, List<String> zones) throws IOException {
+		List<Instance> instanceList = new ArrayList<>();
+		for (String zone : zones) {
+			Compute.Instances.List request = ComputeService.getInstance().instances().list(projectId, zone);
+			InstanceList response;
+			do {
+				response = request.execute();
+				if (response.getItems() == null) {
+					continue;
+				}
+				instanceList.addAll(response.getItems());
+				request.setPageToken(response.getNextPageToken());
+			} while (response.getNextPageToken() != null);
+
+		}
+		return !instanceList.isEmpty() ? instanceList : null;
+	}
+
+	public static List<String> getInstancePrivateIps(Instance instance) {
+		return instance.getNetworkInterfaces().stream().filter(Objects::nonNull)
+				.map(NetworkInterface::getNetworkIP).filter(Objects::nonNull)
+				.collect(Collectors.toList());
+	}
+
+	public static List<String> getInstancePublicIps(Instance instance) {
+		return instance.getNetworkInterfaces()
+				.stream().filter(Objects::nonNull)
+				.map(NetworkInterface::getAccessConfigs)
+				.filter(Objects::nonNull)
+				.flatMap(Collection::stream).filter(Objects::nonNull)
+				.map(AccessConfig::getNatIP).filter(Objects::nonNull)
+				.collect(Collectors.toList());
+	}
+
+
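+	// restrictionMode == true keeps only exact name matches, otherwise substring matches suffice;
+	// in local run mode a mocked instance wired to localhost IPs is returned.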
+	public static List<Instance> getInstancesByName(String name, String projectId, boolean restrictionMode,
+													List<String> zones) throws IOException {
+		if (ConfigPropertyValue.isRunModeLocal()) {
+			List<Instance> mockedInstanceList = new ArrayList<>();
+			Instance mockedInstance = mock(Instance.class);
+			NetworkInterface mockedNetworkInterface = mock(NetworkInterface.class);
+			when(mockedInstance.getNetworkInterfaces()).thenReturn(Collections.singletonList(mockedNetworkInterface));
+			when(mockedInstance.getNetworkInterfaces().get(0).getNetworkIP()).thenReturn(LOCALHOST_IP);
+			AccessConfig mockedAccessConfig = mock(AccessConfig.class);
+			when(mockedInstance.getNetworkInterfaces().get(0).getAccessConfigs())
+					.thenReturn(Collections.singletonList(mockedAccessConfig));
+			when(mockedInstance.getNetworkInterfaces().get(0).getAccessConfigs().get(0).getNatIP())
+					.thenReturn(LOCALHOST_IP);
+			mockedInstanceList.add(mockedInstance);
+			return mockedInstanceList;
+		}
+		List<Instance> instanceList = getInstances(projectId, zones);
+		if (instanceList == null) {
+			LOGGER.warn("There are no virtual machines in GCP for project with id {}", projectId);
+			return instanceList;
+		}
+		if (restrictionMode) {
+			instanceList.removeIf(instance -> !hasName(instance, name));
+		} else {
+			instanceList.removeIf(instance -> !containsName(instance, name));
+		}
+		return !instanceList.isEmpty() ? instanceList : null;
+	}
+
+	private static boolean hasName(Instance instance, String name) {
+		return instance.getName().equals(name);
+	}
+
+	private static boolean containsName(Instance instance, String name) {
+		return instance.getName().contains(name);
+	}
+
+	private static String getStatus(Instance instance) {
+		return instance.getStatus().toLowerCase();
+	}
+
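+	// Waits until the first matching instance reports the expected status or CHECK_TIMEOUT expires, then asserts it.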
+	public static void checkGcpStatus(String instanceName, String projectId, GcpInstanceState expGcpStatus, boolean
+			restrictionMode, List<String> zones) throws InterruptedException, IOException {
+
+		LOGGER.info("Check status of instance with name {} on GCP", instanceName);
+		if (ConfigPropertyValue.isRunModeLocal()) {
+			LOGGER.info("GCP instance with name {} fake status is {}", instanceName, expGcpStatus);
+			return;
+		}
+		List<Instance> instancesWithName = getInstancesByName(instanceName, projectId, restrictionMode, zones);
+		if (instancesWithName == null) {
+			LOGGER.warn("There is no instance in GCP with name {}", instanceName);
+			return;
+		}
+
+		String instanceStatus;
+		long requestTimeout = ConfigPropertyValue.getGcpRequestTimeout().toMillis();
+		long timeout = CHECK_TIMEOUT.toMillis();
+		long expiredTime = System.currentTimeMillis() + timeout;
+		Instance instance = instancesWithName.get(0);
+		while (true) {
+			instanceStatus = getStatus(instance);
+			if (instanceStatus.equalsIgnoreCase(expGcpStatus.toString())) {
+				break;
+			}
+			if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
+				LOGGER.info("GCP instance with name {} state is {}", instanceName, getStatus(instance));
+				throw new CloudException("Timeout expired while waiting for status of GCP instance with name " +
+						instanceName);
+			}
+			Thread.sleep(requestTimeout);
+		}
+
+		for (Instance inst : instancesWithName) {
+			LOGGER.info("GCP instance with name {} status is {}. Instance id {}, private IP {}, public " +
+							"IP {}",
+					instanceName, getStatus(inst), inst.getId(), (!getInstancePrivateIps(inst).isEmpty() ?
+							getInstancePrivateIps(inst).get(0) : NOT_EXIST),
+					(!getInstancePublicIps(inst).isEmpty() ? getInstancePublicIps(inst).get(0) : NOT_EXIST));
+		}
+		Assert.assertEquals(instanceStatus, expGcpStatus.toString(), "GCP instance with name " + instanceName +
+				" status is not correct. Instance id " + instance.getId() + ", private IP " +
+				(!getInstancePrivateIps(instance).isEmpty() ? getInstancePrivateIps(instance).get(0) : NOT_EXIST) +
+				", public IP " +
+				(!getInstancePublicIps(instance).isEmpty() ? getInstancePublicIps(instance).get(0) : NOT_EXIST));
+	}
+
+	public static List<String> getAvailableZonesForProject(String projectId) throws IOException {
+		if (ConfigPropertyValue.isRunModeLocal()) {
+			return Collections.emptyList();
+		}
+		List<Zone> zoneList = new ArrayList<>();
+		Compute.Zones.List request = ComputeService.getInstance().zones().list(projectId);
+		ZoneList response;
+		do {
+			response = request.execute();
+			if (response.getItems() == null) {
+				continue;
+			}
+			zoneList.addAll(response.getItems());
+			request.setPageToken(response.getNextPageToken());
+		} while (response.getNextPageToken() != null);
+		return zoneList.stream().map(Zone::getDescription).collect(Collectors.toList());
+	}
+
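+	// Lazily builds a singleton Compute client authorized with the configured GCP service account file.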
+	private static class ComputeService {
+
+		private static Compute instance;
+
+		private ComputeService() {
+		}
+
+		static synchronized Compute getInstance() throws IOException {
+			if (!ConfigPropertyValue.isRunModeLocal() && instance == null) {
+				try {
+					instance = createComputeService();
+				} catch (GeneralSecurityException e) {
+					LOGGER.error("An exception occurred", e);
+				}
+			}
+			return instance;
+		}
+
+		private static Compute createComputeService() throws IOException, GeneralSecurityException {
+			HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
+			JsonFactory jsonFactory = JacksonFactory.getDefaultInstance();
+
+			GoogleCredential credential =
+					GoogleCredential.fromStream(new FileInputStream(ConfigPropertyValue.getGcpAuthFileName()));
+			if (credential.createScopedRequired()) {
+				credential = credential.createScoped(
+						Collections.singletonList("https://www.googleapis.com/auth/cloud-platform"));
+			}
+
+			return new Compute.Builder(httpTransport, jsonFactory, credential)
+					.setApplicationName("Google-ComputeSample/0.1")
+					.build();
+		}
+
+	}
+
+}
+
+
+
+
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/cloud/gcp/GcpInstanceState.java b/integration-tests/src/main/java/com/epam/datalab/automation/cloud/gcp/GcpInstanceState.java
new file mode 100644
index 0000000..7d0207a
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/cloud/gcp/GcpInstanceState.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.cloud.gcp;
+
+public enum GcpInstanceState {
+	STARTING,
+	RUNNING,
+	TERMINATED,
+	STOPPED;
+
+	@Override
+	public String toString() {
+		return super.toString().toLowerCase();
+	}
+}
+
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/docker/AckStatus.java b/integration-tests/src/main/java/com/epam/datalab/automation/docker/AckStatus.java
new file mode 100644
index 0000000..16dee01
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/docker/AckStatus.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.docker;
+
+public class AckStatus {
+    private int status;
+    
+    private String message;
+
+    public AckStatus() {
+        status = 0;
+        message = "";
+    }
+
+	AckStatus(int status, String message) {
+        this.status = status;
+        this.message = message;
+    }
+
+    public int getStatus() {
+        return status;
+    }
+
+    public String getMessage() {
+        return message;
+    }
+
+    public boolean isOk() {
+        return status == 0;
+    }
+
+    @Override
+    public String toString() {
+        return isOk() ? "OK" : message;
+    }
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/docker/Bridge.java b/integration-tests/src/main/java/com/epam/datalab/automation/docker/Bridge.java
new file mode 100644
index 0000000..5a7ef0e
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/docker/Bridge.java
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.docker;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class Bridge {
+
+	@JsonProperty("IPAMConfig")
+	private Object ipamConfig;
+
+	@JsonProperty("Links")
+	private Object links;
+
+	@JsonProperty("Aliases")
+	private Object aliases;
+
+	@JsonProperty("NetworkID")
+	private String networkId;
+
+	@JsonProperty("EndpointID")
+	private String endpointId;
+
+	@JsonProperty("Gateway")
+	private String gateway;
+
+	@JsonProperty("IPAddress")
+	private String ipAddress;
+
+	@JsonProperty("IPPrefixLen")
+	private int ipPrefixLen;
+
+	@JsonProperty("IPv6Gateway")
+	private String ipv6Gateway;
+
+	@JsonProperty("GlobalIPv6Address")
+	private String globalIpv6Address;
+
+	@JsonProperty("GlobalIPv6PrefixLen")
+	private int globalIpv6PrefixLen;
+
+	@JsonProperty("MacAddress")
+	private String macAddress;
+
+
+	public Object getIpamConfig() {
+		return ipamConfig;
+	}
+
+	public void setIpamConfig(Object ipamConfig) {
+		this.ipamConfig = ipamConfig;
+	}
+
+	public Object getLinks() {
+		return links;
+	}
+
+	public void setLinks(Object links) {
+		this.links = links;
+	}
+
+	public Object getAliases() {
+		return aliases;
+	}
+
+	public void setAliases(Object aliases) {
+		this.aliases = aliases;
+	}
+
+	public String getNetworkId() {
+		return networkId;
+	}
+
+	public void setNetworkId(String networkId) {
+		this.networkId = networkId;
+	}
+
+	public String getEndpointId() {
+		return endpointId;
+	}
+
+	public void setEndpointId(String endpointId) {
+		this.endpointId = endpointId;
+	}
+
+	public String getGateway() {
+		return gateway;
+	}
+
+	public void setGateway(String gateway) {
+		this.gateway = gateway;
+	}
+
+	public String getIpAddress() {
+		return ipAddress;
+	}
+
+	public void setIpAddress(String ipAddress) {
+		this.ipAddress = ipAddress;
+	}
+
+	public int getIpPrefixLen() {
+		return ipPrefixLen;
+	}
+
+	public void setIpPrefixLen(int ipPrefixLen) {
+		this.ipPrefixLen = ipPrefixLen;
+	}
+
+	public String getIpv6Gateway() {
+		return ipv6Gateway;
+	}
+
+	public void setIpv6Gateway(String ipv6Gateway) {
+		this.ipv6Gateway = ipv6Gateway;
+	}
+
+	public String getGlobalIpv6Address() {
+		return globalIpv6Address;
+	}
+
+	public void setGlobalIpv6Address(String globalIpv6Address) {
+		this.globalIpv6Address = globalIpv6Address;
+	}
+
+	public int getGlobalIpv6PrefixLen() {
+		return globalIpv6PrefixLen;
+	}
+
+	public void setGlobalIpv6PrefixLen(int globalIpv6PrefixLen) {
+		this.globalIpv6PrefixLen = globalIpv6PrefixLen;
+	}
+
+	public String getMacAddress() {
+		return macAddress;
+	}
+
+	public void setMacAddress(String macAddress) {
+		this.macAddress = macAddress;
+	}
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/docker/Docker.java b/integration-tests/src/main/java/com/epam/datalab/automation/docker/Docker.java
new file mode 100644
index 0000000..3d52598
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/docker/Docker.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.docker;
+
+import com.epam.datalab.automation.exceptions.DockerException;
+import com.epam.datalab.automation.helper.ConfigPropertyValue;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.jcraft.jsch.ChannelExec;
+import com.jcraft.jsch.JSchException;
+import com.jcraft.jsch.Session;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.testng.Assert;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.List;
+
+public class Docker {
+    private static final Logger LOGGER = LogManager.getLogger(Docker.class);
+    
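+    // Queries the Docker Engine API over its unix socket via netcat; the response body is a JSON list of all containers.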
+    private static final String GET_CONTAINERS = "echo -e \"GET /containers/json?all=1 HTTP/1.0\\r\\n\" | nc -U /var/run/docker.sock";
+    
+    private static final String DOCKER_STATUS_EXIT0 = "Exited (0)";
+
+    private Docker(){}
+
+    public static void checkDockerStatus(String containerName, String ip)
+			throws IOException, InterruptedException, JSchException {
+        
+        LOGGER.info("Check docker status for instance {} and container {}", ip, containerName);
+        if (ConfigPropertyValue.isRunModeLocal()) {
+        	LOGGER.info("  check skipped when running in local mode");
+        	return;
+        }
+
+        Session session = SSHConnect.getConnect(ConfigPropertyValue.getClusterOsUser(), ip, 22);
+        ChannelExec getResult = SSHConnect.setCommand(session, GET_CONTAINERS);
+        InputStream in = getResult.getInputStream();
+        List<DockerContainer> dockerContainerList = getDockerContainerList(in);
+        AckStatus status = SSHConnect.checkAck(getResult);
+        Assert.assertTrue(status.isOk());
+        
+        DockerContainer dockerContainer = getDockerContainer(dockerContainerList, containerName);
+        LOGGER.debug("Docker container for {} has id {} and status {}", containerName, dockerContainer.getId(), dockerContainer.getStatus());
+        Assert.assertTrue(dockerContainer.getStatus().contains(DOCKER_STATUS_EXIT0), "Status of container is not Exited (0)");
+        LOGGER.info("Docker container {} has status {}", containerName, dockerContainer.getStatus());
+    }
+
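+    // The netcat output is a raw HTTP response; the line containing "Id" is the JSON body with the container list.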
+    private static List<DockerContainer> getDockerContainerList(InputStream in) throws IOException {
+        
+        BufferedReader reader = new BufferedReader(new InputStreamReader(in));         
+        String line;
+        List<DockerContainer> dockerContainerList = null;
+
+        TypeReference<List<DockerContainer>> typeRef = new TypeReference<List<DockerContainer>>() { };
+        ObjectMapper mapper = new ObjectMapper();
+
+        while ((line = reader.readLine()) != null) {
+             if (line.contains("Id")) {
+            	 LOGGER.trace("Add docker container: {}", line);
+                 dockerContainerList = mapper.readValue(line, typeRef);
+             }       
+        }
+        
+        return dockerContainerList;
+    }
+
+	private static DockerContainer getDockerContainer(List<DockerContainer> dockerContainerList, String
+			containerName) {
+		for (DockerContainer dockerContainer : dockerContainerList) {
+			String name = dockerContainer.getNames().get(0);
+			if (name.contains(containerName)) {
+				return dockerContainer;
+			}
+		}
+        
+        final String msg = "Docker container for " + containerName + " not found";
+        LOGGER.error(msg);
+		StringBuilder containers = new StringBuilder("Container list:");
+		for (DockerContainer dockerContainer : dockerContainerList) {
+			containers.append(System.lineSeparator()).append(dockerContainer.getNames().get(0));
+		}
+		LOGGER.debug(containers.toString());
+
+		throw new DockerException(msg);
+    }
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/docker/DockerContainer.java b/integration-tests/src/main/java/com/epam/datalab/automation/docker/DockerContainer.java
new file mode 100644
index 0000000..f0c0c9e
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/docker/DockerContainer.java
@@ -0,0 +1,171 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.docker;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import java.util.List;
+
+public class DockerContainer {
+
+	@JsonProperty("Id")
+	private String id;
+
+	@JsonProperty("Names")
+	private List<String> names;
+
+	@JsonProperty("Image")
+	private String image;
+
+	@JsonProperty("ImageID")
+	private String imageID;
+
+	@JsonProperty("Command")
+	private String command;
+
+	@JsonProperty("Created")
+	private int created;
+
+	@JsonProperty("Ports")
+	private List<Object> ports;
+
+	@JsonProperty("Labels")
+	private Labels labels;
+
+	@JsonProperty("State")
+	private String state;
+
+	@JsonProperty("Status")
+	private String status;
+
+	@JsonProperty("HostConfig")
+	private HostConfig hostConfig;
+
+	@JsonProperty("NetworkSettings")
+	private NetworkSettings networkSettings;
+
+	@JsonProperty("Mounts")
+	private List<Object> mounts;
+
+
+    public String getId() {
+		return id;
+	}
+
+	public void setId(String id) {
+		this.id = id;
+	}
+
+	public List<String> getNames() {
+		return names;
+	}
+
+	public void setNames(List<String> names) {
+		this.names = names;
+	}
+
+	public String getImage() {
+		return image;
+	}
+
+	public void setImage(String image) {
+		this.image = image;
+	}
+
+	public String getImageID() {
+		return imageID;
+	}
+
+	public void setImageID(String imageID) {
+		this.imageID = imageID;
+	}
+
+	public String getCommand() {
+		return command;
+	}
+
+	public void setCommand(String command) {
+		this.command = command;
+	}
+
+	public int getCreated() {
+		return created;
+	}
+
+	public void setCreated(int created) {
+		this.created = created;
+	}
+
+	public List<Object> getPorts() {
+		return ports;
+	}
+
+	public void setPorts(List<Object> ports) {
+		this.ports = ports;
+	}
+
+	public Labels getLabels() {
+		return labels;
+	}
+
+	public void setLabels(Labels labels) {
+		this.labels = labels;
+	}
+
+	public String getState() {
+		return state;
+	}
+
+	public void setState(String state) {
+		this.state = state;
+	}
+
+	public String getStatus() {
+		return status;
+	}
+
+	public void setStatus(String status) {
+		this.status = status;
+	}
+
+	public HostConfig getHostConfig() {
+		return hostConfig;
+	}
+
+	public void setHostConfig(HostConfig hostConfig) {
+		this.hostConfig = hostConfig;
+	}
+
+	public NetworkSettings getNetworkSettings() {
+		return networkSettings;
+	}
+
+	public void setNetworkSettings(NetworkSettings networkSettings) {
+		this.networkSettings = networkSettings;
+	}
+
+	public List<Object> getMounts() {
+		return mounts;
+	}
+
+	public void setMounts(List<Object> mounts) {
+		this.mounts = mounts;
+	}
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/docker/HostConfig.java b/integration-tests/src/main/java/com/epam/datalab/automation/docker/HostConfig.java
new file mode 100644
index 0000000..a5a0adb
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/docker/HostConfig.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.docker;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class HostConfig {
+
+	@JsonProperty("NetworkMode")
+	private String networkMode;
+
+    public String getNetworkMode() {
+		return networkMode;
+    }
+
+    public void setNetworkMode(String networkMode) {
+		this.networkMode = networkMode;
+    }
+
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/docker/Labels.java b/integration-tests/src/main/java/com/epam/datalab/automation/docker/Labels.java
new file mode 100644
index 0000000..adbfe31
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/docker/Labels.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.docker;
+
+class Labels {
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/docker/NetworkSettings.java b/integration-tests/src/main/java/com/epam/datalab/automation/docker/NetworkSettings.java
new file mode 100644
index 0000000..d21e52e
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/docker/NetworkSettings.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.docker;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class NetworkSettings {
+
+	@JsonProperty("Networks")
+	private Networks networks;
+
+    public Networks getNetworks() {
+		return networks;
+    }
+
+    public void setNetworks(Networks networks) {
+		this.networks = networks;
+    }
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/docker/Networks.java b/integration-tests/src/main/java/com/epam/datalab/automation/docker/Networks.java
new file mode 100644
index 0000000..2ea6542
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/docker/Networks.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.docker;
+
+public class Networks {
+
+	private Bridge bridge;
+
+    public Bridge getBridge() {
+		return bridge;
+    }
+
+    public void setBridge(Bridge bridge) {
+		this.bridge = bridge;
+    }
+
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/docker/SSHConnect.java b/integration-tests/src/main/java/com/epam/datalab/automation/docker/SSHConnect.java
new file mode 100644
index 0000000..c09fdf3
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/docker/SSHConnect.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.docker;
+
+import com.epam.datalab.automation.helper.ConfigPropertyValue;
+import com.jcraft.jsch.Channel;
+import com.jcraft.jsch.ChannelExec;
+import com.jcraft.jsch.ChannelSftp;
+import com.jcraft.jsch.JSch;
+import com.jcraft.jsch.JSchException;
+import com.jcraft.jsch.Session;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.util.Properties;
+
+import static java.lang.System.err;
+import static java.lang.System.out;
+
+public class SSHConnect {
+	private static final Logger LOGGER = LogManager.getLogger(SSHConnect.class);
+	private static final String LOCALHOST_IP = ConfigPropertyValue.get("LOCALHOST_IP");
+	private static final String STRICT_HOST_KEY_CHECKING = "StrictHostKeyChecking";
+
+	private SSHConnect() {
+	}
+
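+    // Opens an SSH session authenticated with the configured private key; host key checking is disabled for test runs.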
+    public static Session getConnect(String username, String host, int port) throws JSchException {
+        Session session;
+        JSch jsch = new JSch();
+
+        Properties config = new Properties();
+		config.put(STRICT_HOST_KEY_CHECKING, "no");
+        
+        jsch.addIdentity(ConfigPropertyValue.getAccessKeyPrivFileName());
+        session = jsch.getSession(username, host, port);
+        session.setConfig(config);
+
+        LOGGER.info("Connecting as {} to {}:{}", username, host, port);
+        session.connect();
+
+        LOGGER.info("Connected to {}:{}", host, port);
+        return session;
+    }
+
+    public static Session getSession(String username, String host, int port) throws JSchException {
+        Session session;
+        JSch jsch = new JSch();
+
+        Properties config = new Properties();
+		config.put(STRICT_HOST_KEY_CHECKING, "no");
+
+        jsch.addIdentity(ConfigPropertyValue.getAccessKeyPrivFileName());
+        session = jsch.getSession(username, host, port);
+        session.setConfig(config);
+        session.connect();
+
+
+        LOGGER.info("Connected to {}:{}", host, port);
+        return session;
+    }
+
+    public static ChannelSftp getChannelSftp(Session session) throws JSchException {
+        Channel channel = session.openChannel("sftp");
+        channel.connect();
+		return (ChannelSftp) channel;
+    }
+
+    public static Session getForwardedConnect(String username, String hostAlias, int port) throws JSchException {
+        Session session;
+        JSch jsch = new JSch();
+        Properties config = new Properties();
+		config.put(STRICT_HOST_KEY_CHECKING, "no");
+
+        jsch.addIdentity(ConfigPropertyValue.getAccessKeyPrivFileName());
+        session = jsch.getSession(username, LOCALHOST_IP, port);
+        session.setConfig(config);
+        session.setHostKeyAlias(hostAlias);
+        session.connect();
+        LOGGER.info("Connected to {} through {}:{}", hostAlias, LOCALHOST_IP, port);
+        return session;
+    }
+
+	public static ChannelExec setCommand(Session session, String command) throws JSchException {
+        LOGGER.info("Setting command: {}", command);
+
+        ChannelExec channelExec = (ChannelExec)session.openChannel("exec");
+        channelExec.setCommand(command);
+        channelExec.connect();
+
+        return channelExec;
+    }
+
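+	// Blocks until the remote command finishes, polling the channel exit status once per second.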
+	public static AckStatus checkAck(ChannelExec channel) throws InterruptedException {
+		channel.setOutputStream(out, true);
+		channel.setErrStream(err, true);
+
+        int status;
+        while(channel.getExitStatus() == -1) {
+            Thread.sleep(1000);
+        }
+        status = channel.getExitStatus();
+
+        return new AckStatus(status, "");
+    }
+
+	public static AckStatus checkAck(ChannelSftp channel) throws InterruptedException {
+		channel.setOutputStream(out, true);
+
+        int status;
+        while(channel.getExitStatus() == -1) {
+            Thread.sleep(1000);
+        }
+        status = channel.getExitStatus();
+
+        return new AckStatus(status, "");
+    }
+
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/exceptions/CloudException.java b/integration-tests/src/main/java/com/epam/datalab/automation/exceptions/CloudException.java
new file mode 100644
index 0000000..f440c39
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/exceptions/CloudException.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.exceptions;
+
+public class CloudException extends RuntimeException {
+
+	private static final long serialVersionUID = 1L;
+
+    public CloudException(String message){
+        super(message);
+    }
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/exceptions/DockerException.java b/integration-tests/src/main/java/com/epam/datalab/automation/exceptions/DockerException.java
new file mode 100644
index 0000000..69b6f13
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/exceptions/DockerException.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.exceptions;
+
+public class DockerException extends RuntimeException {
+
+	private static final long serialVersionUID = 1L;
+
+	public DockerException(String message) {
+		super(message);
+	}
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/exceptions/JenkinsException.java b/integration-tests/src/main/java/com/epam/datalab/automation/exceptions/JenkinsException.java
new file mode 100644
index 0000000..0171ce9
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/exceptions/JenkinsException.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.exceptions;
+
+public class JenkinsException extends RuntimeException {
+
+	private static final long serialVersionUID = 1L;
+
+	public JenkinsException(String message) {
+		super(message);
+	}
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/exceptions/LoadFailException.java b/integration-tests/src/main/java/com/epam/datalab/automation/exceptions/LoadFailException.java
new file mode 100644
index 0000000..875e9f1
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/exceptions/LoadFailException.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.exceptions;
+
+public class LoadFailException extends RuntimeException {
+
+	private static final long serialVersionUID = 1L;
+
+	public LoadFailException(String message, Exception cause) {
+		super(message, cause);
+	}
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/helper/CloudHelper.java b/integration-tests/src/main/java/com/epam/datalab/automation/helper/CloudHelper.java
new file mode 100644
index 0000000..76ef9ed
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/helper/CloudHelper.java
@@ -0,0 +1,219 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.helper;
+
+import com.amazonaws.services.ec2.model.Instance;
+import com.amazonaws.services.ec2.model.Tag;
+import com.epam.datalab.automation.cloud.aws.AmazonHelper;
+import com.epam.datalab.automation.cloud.azure.AzureHelper;
+import com.epam.datalab.automation.cloud.gcp.GcpHelper;
+import com.epam.datalab.automation.exceptions.CloudException;
+import com.epam.datalab.automation.model.DeployClusterDto;
+import com.epam.datalab.automation.model.DeployDataProcDto;
+import com.epam.datalab.automation.model.DeployEMRDto;
+import com.epam.datalab.automation.model.NotebookConfig;
+import org.apache.commons.lang3.StringUtils;
+
+import java.io.IOException;
+import java.util.List;
+
+public class CloudHelper {
+
+    private CloudHelper(){}
+
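+	// Resolves the public IP of a named instance for whichever cloud provider the tests run against.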
+	public static String getInstancePublicIP(String name, boolean restrictionMode) throws IOException {
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+                return AmazonHelper.getInstance(name).getPublicIpAddress();
+            case CloudProvider.AZURE_PROVIDER:
+                if(AzureHelper.getVirtualMachinesByName(name, restrictionMode) != null){
+                    return AzureHelper.getVirtualMachinesByName(name, restrictionMode).get(0)
+                            .getPrimaryPublicIPAddress().ipAddress();
+                } else return null;
+            case CloudProvider.GCP_PROVIDER:
+                List<com.google.api.services.compute.model.Instance> instanceList =
+                        GcpHelper.getInstancesByName(name, ConfigPropertyValue.getGcpDataLabProjectId(),
+                                restrictionMode,
+                                GcpHelper.getAvailableZonesForProject(ConfigPropertyValue.getGcpDataLabProjectId()));
+                if (instanceList != null && !GcpHelper.getInstancePublicIps(instanceList.get(0)).isEmpty()) {
+                    return GcpHelper.getInstancePublicIps(instanceList.get(0)).get(0);
+                } else return null;
+            default:
+                return null;
+        }
+    }
+
+	public static String getInstancePrivateIP(String name, boolean restrictionMode) throws IOException {
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+                return AmazonHelper.getInstance(name).getPrivateIpAddress();
+            case CloudProvider.AZURE_PROVIDER:
+                if(AzureHelper.getVirtualMachinesByName(name, restrictionMode) != null){
+                    return AzureHelper.getVirtualMachinesByName(name, restrictionMode).get(0)
+                            .getPrimaryNetworkInterface().primaryPrivateIP();
+                } else return null;
+            case CloudProvider.GCP_PROVIDER:
+                List<com.google.api.services.compute.model.Instance> instanceList =
+                        GcpHelper.getInstancesByName(name, ConfigPropertyValue.getGcpDataLabProjectId(), restrictionMode,
+                                GcpHelper.getAvailableZonesForProject(ConfigPropertyValue.getGcpDataLabProjectId()));
+                if (instanceList != null && !GcpHelper.getInstancePrivateIps(instanceList.get(0)).isEmpty()) {
+                    return GcpHelper.getInstancePrivateIps(instanceList.get(0)).get(0);
+                } else return null;
+            default:
+                return null;
+        }
+    }
+
+	static String getInstanceNameByCondition(String name, boolean restrictionMode) throws IOException {
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+                Instance instance = AmazonHelper.getInstance(name);
+                for (Tag tag : instance.getTags()) {
+                    if (tag.getKey().equals("Name")) {
+                        return tag.getValue();
+                    }
+                }
+                throw new CloudException("Could not detect name for instance " + name);
+            case CloudProvider.AZURE_PROVIDER:
+                if (AzureHelper.getVirtualMachinesByName(name, restrictionMode) != null) {
+                    return AzureHelper.getVirtualMachinesByName(name, restrictionMode).get(0).name();
+                } else return null;
+            case CloudProvider.GCP_PROVIDER:
+                List<com.google.api.services.compute.model.Instance> instances =
+                        GcpHelper.getInstancesByName(name, ConfigPropertyValue.getGcpDataLabProjectId(),
+                                restrictionMode,
+                                GcpHelper.getAvailableZonesForProject(ConfigPropertyValue.getGcpDataLabProjectId()));
+                if (instances != null && !instances.isEmpty()) {
+                    return instances.get(0).getName();
+                } else return null;
+            default:
+                return null;
+        }
+    }
+
+    public static String getClusterConfFileLocation(){
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+                return PropertiesResolver.getClusterEC2ConfFileLocation();
+            case CloudProvider.AZURE_PROVIDER:
+                return PropertiesResolver.getClusterAzureConfFileLocation();
+            case CloudProvider.GCP_PROVIDER:
+                return PropertiesResolver.getClusterGcpConfFileLocation();
+            default:
+                return null;
+        }
+    }
+
+
+    public static String getPythonTestingScript(){
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+                return "/usr/bin/python %s --storage %s --cloud aws --cluster_name %s --os_user %s";
+            case CloudProvider.AZURE_PROVIDER:
+                if(ConfigPropertyValue.getAzureDatalakeEnabled().equalsIgnoreCase("true")){
+                    return "/usr/bin/python %s --storage %s --cloud azure --cluster_name %s --os_user %s --azure_datalake_account "
+                            + ConfigPropertyValue.getAzureDatalakeSharedAccount();
+                }
+                else return "/usr/bin/python %s --storage %s --cloud azure --cluster_name %s --os_user %s --azure_storage_account "
+                        + ConfigPropertyValue.getAzureStorageSharedAccount();
+            case CloudProvider.GCP_PROVIDER:
+                return "/usr/bin/python %s --storage %s --cloud gcp --cluster_name %s --os_user %s";
+            default:
+                return null;
+        }
+    }
+
+    public static String getPythonTestingScript2(){
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+				return "/usr/bin/python /home/%s/%s --storage %s --notebook %s --cloud aws";
+            case CloudProvider.AZURE_PROVIDER:
+                if(ConfigPropertyValue.getAzureDatalakeEnabled().equalsIgnoreCase("true")){
+					return "/usr/bin/python /home/%s/%s --storage %s --notebook %s --cloud azure " +
+							"--azure_datalake_account " + ConfigPropertyValue.getAzureDatalakeSharedAccount();
+                } else return "/usr/bin/python /home/%s/%s --storage %s --notebook %s --cloud azure " +
+						"--azure_storage_account " + ConfigPropertyValue.getAzureStorageSharedAccount();
+            case CloudProvider.GCP_PROVIDER:
+				return "/usr/bin/python /home/%s/%s --storage %s --notebook %s --cloud gcp";
+            default:
+                return null;
+        }
+    }
+
+	static String getStorageNameAppendix() {
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+                return "bucket";
+            case CloudProvider.AZURE_PROVIDER:
+                if(ConfigPropertyValue.getAzureDatalakeEnabled().equalsIgnoreCase("true")){
+                    return "folder";
+                }
+                else return "container";
+            case CloudProvider.GCP_PROVIDER:
+                return "bucket";
+            default:
+                return null;
+        }
+    }
+
+	public static String getDockerTemplateFileForDES(boolean isSpotRequired) {
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+				return isSpotRequired ? "EMR_spot.json" : "EMR.json";
+            case CloudProvider.GCP_PROVIDER:
+                return "dataproc.json";
+            default:
+                return null;
+        }
+    }
+
+    public static Class<? extends DeployClusterDto> getDeployClusterClass() {
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+                return DeployEMRDto.class;
+            case CloudProvider.GCP_PROVIDER:
+                return DeployDataProcDto.class;
+            default:
+                return null;
+        }
+    }
+
+	public static DeployClusterDto populateDeployClusterDto(DeployClusterDto deployClusterDto,
+															NotebookConfig nbConfig) {
+		if (nbConfig.getDataEngineType().equals(NamingHelper.DATA_ENGINE_SERVICE) &&
+				ConfigPropertyValue.getCloudProvider().equals(CloudProvider.AWS_PROVIDER)) {
+			DeployEMRDto emrDto = (DeployEMRDto) deployClusterDto;
+			if (!StringUtils.isEmpty(nbConfig.getDesVersion())) {
+				emrDto.setEmrVersion(nbConfig.getDesVersion());
+			}
+			if (nbConfig.isDesSpotRequired() && nbConfig.getDesSpotPrice() > 0) {
+				emrDto.setEmrSlaveInstanceSpot(nbConfig.isDesSpotRequired());
+				emrDto.setEmrSlaveInstanceSpotPctPrice(nbConfig.getDesSpotPrice());
+			}
+			return emrDto;
+		} else return deployClusterDto;
+	}
+
+	static String getGcpDataprocClusterName(String gcpDataprocMasterNodeName) {
+        return gcpDataprocMasterNodeName != null ?
+                gcpDataprocMasterNodeName.substring(0, gcpDataprocMasterNodeName.lastIndexOf('-')) : null;
+	}
+
+}
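
A minimal sketch of calling the helper above from a test, assuming the test
classpath contains these classes and a loaded config file so that
ConfigPropertyValue.getCloudProvider() resolves; "it-notebook" is a
hypothetical instance name:

    import com.epam.datalab.automation.helper.CloudHelper;

    import java.io.IOException;

    public class CloudHelperSketch {
        public static void main(String[] args) throws IOException {
            // Lookup is delegated to the provider-specific helper (Amazon, Azure
            // or GCP) selected by the CLOUD_PROVIDER configuration key.
            String publicIp = CloudHelper.getInstancePublicIP("it-notebook", false);
            String privateIp = CloudHelper.getInstancePrivateIP("it-notebook", false);
            System.out.println(publicIp + " / " + privateIp);
        }
    }
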
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/helper/CloudProvider.java b/integration-tests/src/main/java/com/epam/datalab/automation/helper/CloudProvider.java
new file mode 100644
index 0000000..23163f6
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/helper/CloudProvider.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.helper;
+
+public class CloudProvider {
+
+	public static final String AWS_PROVIDER = "aws";
+	public static final String AZURE_PROVIDER = "azure";
+	public static final String GCP_PROVIDER = "gcp";
+
+	private CloudProvider() {
+	}
+
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/helper/ConfigPropertyValue.java b/integration-tests/src/main/java/com/epam/datalab/automation/helper/ConfigPropertyValue.java
new file mode 100644
index 0000000..3ac7221
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/helper/ConfigPropertyValue.java
@@ -0,0 +1,387 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.helper;
+
+import com.epam.datalab.automation.exceptions.LoadFailException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.io.File;
+import java.io.FileReader;
+import java.nio.file.Paths;
+import java.time.Duration;
+import java.util.Properties;
+
+public class ConfigPropertyValue {
+
+	private static final Logger LOGGER = LogManager.getLogger(ConfigPropertyValue.class);
+	private static final String CONFIG_FILE_NAME;
+
+	public static final String JENKINS_USERNAME = "JENKINS_USERNAME";
+	public static final String JENKINS_PASS = "JENKINS_PASSWORD";
+	private static final String USERNAME = "USERNAME";
+	private static final String PASS = "PASSWORD";
+	private static final String NOT_IAM_USERNAME = "NOT_IAM_USERNAME";
+	private static final String NOT_IAM_PASS = "NOT_IAM_PASSWORD";
+	private static final String NOT_DATALAB_USERNAME = "NOT_DATALAB_USERNAME";
+	private static final String NOT_DATALAB_PASS = "NOT_DATALAB_PASSWORD";
+	private static final String JENKINS_JOB_URL = "JENKINS_JOB_URL";
+	private static final String USER_FOR_ACTIVATE_KEY = "USER_FOR_ACTIVATE_KEY";
+	private static final String PASS_FOR_ACTIVATE_KEY = "PASSWORD_FOR_ACTIVATE_KEY";
+	private static final String ACCESS_KEY_PRIV_FILE_NAME = "ACCESS_KEY_PRIV_FILE_NAME";
+	private static final String ACCESS_KEY_PUB_FILE_NAME = "ACCESS_KEY_PUB_FILE_NAME";
+
+	private static final String CLOUD_PROVIDER = "CLOUD_PROVIDER";
+
+	private static final String AWS_ACCESS_KEY_ID = "AWS_ACCESS_KEY_ID";
+	private static final String AWS_SECRET_ACCESS_KEY = "AWS_SECRET_ACCESS_KEY";
+	private static final String AWS_REGION = "AWS_REGION";
+	private static final String AWS_REQUEST_TIMEOUT = "AWS_REQUEST_TIMEOUT";
+
+	private static final String AZURE_REGION = "AZURE_REGION";
+	private static final String AZURE_REQUEST_TIMEOUT = "AZURE_REQUEST_TIMEOUT";
+	private static final String AZURE_DATALAKE_ENABLED = "AZURE_DATALAKE_ENABLED";
+	private static final String AZURE_DATALAKE_SHARED_ACCOUNT = "AZURE_DATALAKE_SHARED_ACCOUNT";
+	private static final String AZURE_STORAGE_SHARED_ACCOUNT = "AZURE_STORAGE_SHARED_ACCOUNT";
+	private static final String AZURE_AUTHENTICATION_FILE = "AZURE_AUTHENTICATION_FILE";
+
+	private static final String GCP_DATALAB_PROJECT_ID = "GCP_DATALAB_PROJECT_ID";
+	private static final String GCP_REGION = "GCP_REGION";
+	private static final String GCP_REQUEST_TIMEOUT = "GCP_REQUEST_TIMEOUT";
+	private static final String GCP_AUTHENTICATION_FILE = "GCP_AUTHENTICATION_FILE";
+
+	private static final String TIMEOUT_JENKINS_AUTOTEST = "TIMEOUT_JENKINS_AUTOTEST";
+	private static final String TIMEOUT_UPLOAD_KEY = "TIMEOUT_UPLOAD_KEY";
+	private static final String TIMEOUT_SSN_STARTUP = "TIMEOUT_SSN_STARTUP";
+
+	private static final String CLUSTER_OS_USERNAME = "CLUSTER_OS_USERNAME";
+	private static final String CLUSTER_OS_FAMILY = "CLUSTER_OS_FAMILY";
+    private static final String CONF_TAG_RESOURCE_ID = "CONF_TAG_RESOURCE_ID";
+
+	private static final String JUPYTER_SCENARIO_FILES = "JUPYTER_SCENARIO_FILES";
+	private static final String NOTEBOOKS_TO_TEST = "NOTEBOOKS_TO_TEST";
+	private static final String SKIPPED_LIBS = "SKIPPED_LIBS";
+	private static final String EXECUTION_THREADS = "execution.threads";
+
+    private static final String USE_JENKINS = "USE_JENKINS";
+    private static final String SSN_URL = "SSN_URL";
+    private static final String SERVICE_BASE_NAME = "SERVICE_BASE_NAME";
+    private static final String RUN_MODE_LOCAL = "RUN_MODE_LOCAL";
+    private static final String LOCALHOST_IP = "LOCALHOST_IP";
+
+    private static String jenkinsBuildNumber;
+
+
+    private static final Properties props = new Properties();
+
+    static {
+        CONFIG_FILE_NAME = PropertiesResolver.getConfFileLocation();
+        jenkinsBuildNumber = System.getProperty("jenkins.buildNumber", "");
+        if (jenkinsBuildNumber.isEmpty()) {
+            jenkinsBuildNumber = null;
+            LOGGER.info("Jenkins build number missed");
+        }
+        
+    	loadProperties();
+    }
+    
+    private ConfigPropertyValue() { }
+	
+    // Values such as "10s" or "20m" are parsed as ISO-8601 durations ("PT10s").
+    private static Duration getDuration(String duration) {
+        return Duration.parse("PT" + duration);
+    }
+    
+	public static String get(String propertyName) {
+		return get(propertyName, "");
+	}
+
+	public static String get(String propertyName, String defaultValue) {
+		return props.getProperty(propertyName, defaultValue);
+	}
+
+	private static int getInt(String value) {
+        return Integer.parseInt(value);
+    }
+	
+	public static int get(String propertyName, int defaultValue) {
+		if (props.values().isEmpty()) {
+			loadProperties();
+		}
+		String s = props.getProperty(propertyName, String.valueOf(defaultValue)); 
+		return Integer.parseInt(s);
+	}
+	
+	private static void printProperty(String propertyName) {
+        LOGGER.info("{} is {}", propertyName , props.getProperty(propertyName));
+	}
+	
+	private static void setKeyProperty(String propertyName) {
+		String s = props.getProperty(propertyName, "");
+		if (!s.isEmpty()) {
+            s = Paths.get(PropertiesResolver.getKeysLocation(), s).toAbsolutePath().toString();
+            props.setProperty(propertyName, s);
+        }
+	}
+	
+	private static void loadProperties() {
+        try (FileReader fin = new FileReader(new File(CONFIG_FILE_NAME))) {
+
+	        props.load(fin);
+
+	        PropertiesResolver.overlapProperty(props, CLUSTER_OS_USERNAME, true);
+	        PropertiesResolver.overlapProperty(props, CLUSTER_OS_FAMILY, true);
+	        PropertiesResolver.overlapProperty(props, AWS_REGION, true);
+	        PropertiesResolver.overlapProperty(props, AZURE_REGION, true);
+	        PropertiesResolver.overlapProperty(props, GCP_DATALAB_PROJECT_ID, true);
+	        PropertiesResolver.overlapProperty(props, GCP_REGION, true);
+	        PropertiesResolver.overlapProperty(props, NOTEBOOKS_TO_TEST, false);
+	        PropertiesResolver.overlapProperty(props, SKIPPED_LIBS, true);
+	        PropertiesResolver.overlapProperty(props, USE_JENKINS, true);
+	        PropertiesResolver.overlapProperty(props, JENKINS_JOB_URL, !isUseJenkins());
+	        PropertiesResolver.overlapProperty(props, SSN_URL, isUseJenkins());
+	        PropertiesResolver.overlapProperty(props, SERVICE_BASE_NAME, isUseJenkins());
+	        PropertiesResolver.overlapProperty(props, RUN_MODE_LOCAL, true);
+
+	        setKeyProperty(ACCESS_KEY_PRIV_FILE_NAME);
+            setKeyProperty(ACCESS_KEY_PUB_FILE_NAME);
+        } catch (Exception e) {
+            LOGGER.fatal("Failed to load properties from file {}.", CONFIG_FILE_NAME, e);
+            throw new LoadFailException("Failed to load properties from \"" + CONFIG_FILE_NAME + "\". " +
+                    e.getLocalizedMessage(), e);
+        }
+
+		printProperty(JENKINS_USERNAME);
+		printProperty(JENKINS_PASS);
+		printProperty(USERNAME);
+		printProperty(PASS);
+		printProperty(NOT_IAM_USERNAME);
+		printProperty(NOT_IAM_PASS);
+		printProperty(NOT_DATALAB_USERNAME);
+		printProperty(NOT_DATALAB_PASS);
+		printProperty(JENKINS_JOB_URL);
+		printProperty(USER_FOR_ACTIVATE_KEY);
+		printProperty(PASS_FOR_ACTIVATE_KEY);
+		printProperty(ACCESS_KEY_PRIV_FILE_NAME);
+		printProperty(ACCESS_KEY_PUB_FILE_NAME);
+
+		printProperty(TIMEOUT_JENKINS_AUTOTEST);
+		printProperty(TIMEOUT_UPLOAD_KEY);
+		printProperty(TIMEOUT_SSN_STARTUP);
+
+        printProperty(JUPYTER_SCENARIO_FILES);
+        printProperty(CLOUD_PROVIDER);
+
+        printProperty(AZURE_DATALAKE_ENABLED);
+        printProperty(AZURE_DATALAKE_SHARED_ACCOUNT);
+        printProperty(AZURE_STORAGE_SHARED_ACCOUNT);
+        printProperty(NOTEBOOKS_TO_TEST);
+		printProperty(SKIPPED_LIBS);
+		printProperty(CLUSTER_OS_USERNAME);
+        printProperty(CLUSTER_OS_FAMILY);
+        printProperty(CONF_TAG_RESOURCE_ID);
+
+        printProperty(USE_JENKINS);
+        printProperty(RUN_MODE_LOCAL);
+        printProperty(LOCALHOST_IP);
+	}
+    
+    
+    public static String getJenkinsBuildNumber() {
+    	return jenkinsBuildNumber;
+    }
+
+    public static void setJenkinsBuildNumber(String jenkinsBuildNumber) {
+    	ConfigPropertyValue.jenkinsBuildNumber = jenkinsBuildNumber;
+    }
+
+    public static String getJenkinsUsername() {
+    	return get(JENKINS_USERNAME);
+    }
+    
+    public static String getJenkinsPassword() {
+		return get(JENKINS_PASS);
+    }
+
+    public static String getUsername() {
+    	return get(USERNAME);
+    }
+    
+    public static String getUsernameSimple() {
+    	String s = get(USERNAME);
+		int i = s.indexOf('@');
+		return (i == -1 ? s : s.substring(0, i).toLowerCase());
+	}
+
+    public static String getPassword() {
+		return get(PASS);
+    }
+
+	public static String getNotIAMUsername() {
+		return get(NOT_IAM_USERNAME);
+	}
+
+	public static String getNotIAMPassword() {
+		return get(NOT_IAM_PASS);
+	}
+
+	public static String getNotDataLabUsername() {
+		return get(NOT_DATALAB_USERNAME);
+	}
+
+	public static String getNotDataLabPassword() {
+		return get(NOT_DATALAB_PASS);
+	}
+
+	public static String getJenkinsJobURL() {
+		return get(JENKINS_JOB_URL);
+	}
+
+	public static String getUserForActivateKey() {
+		return get(USER_FOR_ACTIVATE_KEY);
+	}
+
+    public static String getPasswordForActivateKey() {
+		return get(PASS_FOR_ACTIVATE_KEY);
+    }
+
+
+    public static String getAccessKeyPrivFileName() {
+    	File file = new File(get(ACCESS_KEY_PRIV_FILE_NAME));
+        return file.getAbsolutePath();
+    }
+
+    public static String getAccessKeyPubFileName() {
+    	File file = new File(get(ACCESS_KEY_PUB_FILE_NAME));
+        return file.getAbsolutePath();
+    }
+
+    public static String getCloudProvider(){
+        return get(CLOUD_PROVIDER);
+    }
+
+    public static String getAzureAuthFileName(){
+        File file = new File(get(AZURE_AUTHENTICATION_FILE));
+        return file.getAbsolutePath();
+    }
+
+	public static String getGcpAuthFileName() {
+		File file = new File(get(GCP_AUTHENTICATION_FILE));
+		return file.getAbsolutePath();
+	}
+
+    public static String getAwsAccessKeyId() {
+        return get(AWS_ACCESS_KEY_ID);
+    }
+
+    public static String getAwsSecretAccessKey() {
+        return get(AWS_SECRET_ACCESS_KEY);
+    }
+
+	public static String getAwsRegion() {
+	    return get(AWS_REGION);
+	}
+
+	public static Duration getAwsRequestTimeout() {
+    	return getDuration(get(AWS_REQUEST_TIMEOUT, "10s"));
+    }
+
+    public static String getAzureRegion() {
+        return get(AZURE_REGION);
+    }
+
+    public static String getAzureDatalakeEnabled() {
+        return get(AZURE_DATALAKE_ENABLED);
+    }
+
+	public static String getAzureDatalakeSharedAccount() {
+		return get(AZURE_DATALAKE_SHARED_ACCOUNT);
+	}
+
+	public static String getAzureStorageSharedAccount() {
+		return get(AZURE_STORAGE_SHARED_ACCOUNT);
+	}
+
+	public static String getGcpDataLabProjectId() {
+		return get(GCP_DATALAB_PROJECT_ID);
+	}
+
+	public static String getGcpRegion() {
+		return get(GCP_REGION);
+	}
+
+	public static Duration getGcpRequestTimeout() {
+		return getDuration(get(GCP_REQUEST_TIMEOUT, "10s"));
+	}
+
+    public static Duration getAzureRequestTimeout() {
+        return getDuration(get(AZURE_REQUEST_TIMEOUT, "10s"));
+    }
+
+    public static Duration getTimeoutJenkinsAutotest() {
+    	return getDuration(get(TIMEOUT_JENKINS_AUTOTEST, "0s"));
+    }
+
+    public static int getExecutionThreads() {
+        return getInt(get(EXECUTION_THREADS, "-1"));
+    }
+
+    public static Duration getTimeoutUploadKey() {
+    	return getDuration(get(TIMEOUT_UPLOAD_KEY, "0s"));
+    }
+
+    public static Duration getTimeoutSSNStartup() {
+    	return getDuration(get(TIMEOUT_SSN_STARTUP, "0s"));
+    }
+
+
+    public static String getClusterOsUser() {
+    	return get(CLUSTER_OS_USERNAME);
+    }
+
+    public static String getClusterOsFamily() {
+    	return get(CLUSTER_OS_FAMILY);
+    }
+
+    public static String getNotebookTemplates() {
+    	return get(NOTEBOOKS_TO_TEST);
+    }
+
+	public static String getSkippedLibs() {
+		return get(SKIPPED_LIBS, "[]");
+	}
+
+	public static boolean isUseJenkins() {
+        String s = get(USE_JENKINS, "true");
+    	return Boolean.valueOf(s);
+    }
+    
+    public static String getSsnUrl() {
+        return get(SSN_URL);
+    }
+    
+    public static String getServiceBaseName() {
+        return get(SERVICE_BASE_NAME);
+    }
+    
+    public static boolean isRunModeLocal() {
+    	String s = get(RUN_MODE_LOCAL, "false");
+    	return Boolean.valueOf(s);
+    }
+}
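
A short sketch of reading configuration through the class above (assuming the
config file referenced by PropertiesResolver.getConfFileLocation() is present,
since it is loaded in the static initializer). Timeout values such as "10s" or
"20m" are parsed as ISO-8601 durations by prefixing "PT":

    import com.epam.datalab.automation.helper.ConfigPropertyValue;

    import java.time.Duration;

    public class ConfigSketch {
        public static void main(String[] args) {
            // "SSN_URL" is one of the keys defined above; get() falls back to "".
            String ssnUrl = ConfigPropertyValue.get("SSN_URL");
            // A config value of "20m" becomes Duration.parse("PT20m") -> PT20M.
            Duration ssnStartup = ConfigPropertyValue.getTimeoutSSNStartup();
            System.out.println(ssnUrl + " should be up within " + ssnStartup);
        }
    }
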
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/helper/NamingHelper.java b/integration-tests/src/main/java/com/epam/datalab/automation/helper/NamingHelper.java
new file mode 100644
index 0000000..44fe86e
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/helper/NamingHelper.java
@@ -0,0 +1,283 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.helper;
+
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class NamingHelper {
+	public static final String CLUSTER_ABSENT = "cluster_absent";
+	public static final String DATA_ENGINE = "dataengine";
+	public static final String DATA_ENGINE_SERVICE = "dataengine-service";
+	public static final String DEEPLEARNING = "deeplearning";
+	public static final String JUPYTER = "jupyter";
+	public static final String TENSOR = "tensor";
+	public static final String RSTUDIO = "rstudio";
+	public static final String ZEPPELIN = "zeppelin";
+
+	private static final Map<String, String> SIMPLE_NOTEBOOK_NAMES = new HashMap<>();
+
+    private static AtomicInteger idCounter = new AtomicInteger(0);
+    
+    private static String serviceBaseName;
+    private static String ssnURL;
+    private static String ssnIp;
+    private static String ssnToken;
+
+	static {
+		SIMPLE_NOTEBOOK_NAMES.put(DEEPLEARNING, "dlr");
+		SIMPLE_NOTEBOOK_NAMES.put(JUPYTER, "jup");
+		SIMPLE_NOTEBOOK_NAMES.put(TENSOR, "tfl");
+		SIMPLE_NOTEBOOK_NAMES.put(RSTUDIO, "rst");
+		SIMPLE_NOTEBOOK_NAMES.put(ZEPPELIN, "zep");
+	}
+
+    private NamingHelper(){}
+
+	public static Map<String, String> getSimpleNotebookNames() {
+		return SIMPLE_NOTEBOOK_NAMES;
+	}
+
+	public static String getServiceBaseName() {
+    	return serviceBaseName;
+    }
+    
+    public static void setServiceBaseName(String serviceBaseName) {
+    	if (NamingHelper.serviceBaseName != null) {
+    		throw new IllegalArgumentException("Field serviceBaseName already has a value");
+    	}
+    	NamingHelper.serviceBaseName = serviceBaseName;
+    }
+    
+    public static String getSsnURL() {
+    	return ssnURL;
+    }
+    
+    public static void setSsnURL(String ssnURL) {
+    	if (NamingHelper.ssnURL != null) {
+    		throw new IllegalArgumentException("Field ssnURL already has a value");
+    	}
+    	NamingHelper.ssnURL = ssnURL;
+    }
+
+    public static String getSsnName() {
+    	return serviceBaseName + "-ssn";
+    }
+    
+    public static String getSsnIp() {
+    	return ssnIp;
+    }
+    
+    public static void setSsnIp(String ssnIp) {
+    	if (NamingHelper.ssnIp != null) {
+    		throw new IllegalArgumentException("Field ssnIp already has a value");
+    	}
+    	NamingHelper.ssnIp = ssnIp;
+    }
+
+    public static String getSsnToken() {
+    	return ssnToken;
+    }
+    
+    public static void setSsnToken(String ssnToken) {
+    	if (NamingHelper.ssnToken != null) {
+    		throw new IllegalArgumentException("Field ssnToken already has a value");
+    	}
+    	NamingHelper.ssnToken = ssnToken;
+    }
+    
+    public static String getEdgeName() {
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+				return String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(), "edge");
+            case CloudProvider.AZURE_PROVIDER:
+			case CloudProvider.GCP_PROVIDER:
+				return String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(), "edge")
+                        .replace('_', '-');
+			default:
+                return null;
+        }
+    }
+    
+    public static String getNotebookInstanceName(String notebookName) {
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+				return String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(), "nb", notebookName);
+            case CloudProvider.AZURE_PROVIDER:
+			case CloudProvider.GCP_PROVIDER:
+				return String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(), "nb", notebookName)
+                        .replace('_', '-');
+			default:
+                return null;
+        }
+    }
+    
+    public static String getClusterInstanceName(String notebookName, String clusterName, String dataEngineType) {
+		if (DATA_ENGINE.equals(dataEngineType)) {
+            switch (ConfigPropertyValue.getCloudProvider()) {
+                case CloudProvider.AWS_PROVIDER:
+					return String.join("-", getClusterInstanceNameForTestDES(notebookName, clusterName,
+							dataEngineType), "m");
+                case CloudProvider.AZURE_PROVIDER:
+				case CloudProvider.GCP_PROVIDER:
+					return String.join("-", getClusterInstanceNameForTestDES(notebookName, clusterName,
+							dataEngineType), "m").replace('_', '-');
+				default:
+                    return null;
+            }
+    	}
+    	else {
+    		return getClusterInstanceNameForTestDES(notebookName,clusterName,dataEngineType);
+    	}
+    }
+    
+    public static String getClusterInstanceNameForTestDES(String notebookName, String clusterName, String dataEngineType) {
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+				return DATA_ENGINE.equals(dataEngineType) ?
+						String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(),
+								"de", notebookName, clusterName) :
+						String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(),
+								"des", notebookName, clusterName);
+
+            case CloudProvider.AZURE_PROVIDER:
+				return DATA_ENGINE.equals(dataEngineType) ?
+						String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(),
+								"de", notebookName, clusterName).replace('_', '-') :
+						String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(),
+								"des", notebookName, clusterName).replace('_', '-');
+
+			case CloudProvider.GCP_PROVIDER:
+				return DATA_ENGINE.equals(dataEngineType) ?
+						String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(),
+								"de", notebookName, clusterName).replace('_', '-') :
+						String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(),
+								"des", notebookName, clusterName, "m").replace('_', '-');
+			default:
+                return null;
+        }
+
+    }
+
+	public static String getNotebookContainerName(String notebookName, String action) {
+    	return String.join("_", ConfigPropertyValue.getUsernameSimple(), action, "exploratory", notebookName);
+    }
+
+	public static String getClusterContainerName(String notebookName, String clusterName, String action) {
+		return String.join("_", ConfigPropertyValue.getUsernameSimple(), action, "computational",
+				notebookName, clusterName);
+    }
+    
+    public static String generateRandomValue() {
+		SimpleDateFormat df = new SimpleDateFormat("yyyyMMddHHmmss"); // HH = 24-hour clock
+        return String.join("_", "ITest", df.format(new Date()), String.valueOf(idCounter.incrementAndGet()));
+    }
+
+    public static String generateRandomValue(String notebookTemplateName) {
+		return String.join("_", SIMPLE_NOTEBOOK_NAMES.get(notebookTemplateName),
+				String.valueOf(idCounter.incrementAndGet()));
+    }
+    
+    public static String getSelfServiceURL(String path) {
+        return ssnURL + path;
+    }
+    
+    public static String getStorageName() {
+        switch (ConfigPropertyValue.getCloudProvider()) {
+            case CloudProvider.AWS_PROVIDER:
+			case CloudProvider.GCP_PROVIDER:
+                return String.format("%s-%s-%s", serviceBaseName, ConfigPropertyValue.getUsernameSimple(),
+                        CloudHelper.getStorageNameAppendix()).replace('_', '-').toLowerCase();
+            case CloudProvider.AZURE_PROVIDER:
+                return String.format("%s-%s-%s", serviceBaseName, "shared",
+                        CloudHelper.getStorageNameAppendix()).replace('_', '-').toLowerCase();
+			default:
+                return null;
+        }
+    }
+
+	public static String getClusterName(String clusterInstanceName, String dataEngineType, boolean restrictionMode)
+			throws IOException {
+		switch (ConfigPropertyValue.getCloudProvider()) {
+			case CloudProvider.AWS_PROVIDER:
+			case CloudProvider.AZURE_PROVIDER:
+				return DATA_ENGINE.equals(dataEngineType) ? clusterInstanceName :
+						CloudHelper.getInstanceNameByCondition(clusterInstanceName, restrictionMode);
+
+			case CloudProvider.GCP_PROVIDER:
+				return DATA_ENGINE.equals(dataEngineType) ? clusterInstanceName :
+						CloudHelper.getGcpDataprocClusterName(
+								CloudHelper.getInstanceNameByCondition(clusterInstanceName, restrictionMode));
+			default:
+				return null;
+		}
+    }
+
+	public static String getNotebookTestTemplatesPath(String notebookName) {
+		if (notebookName.contains(getSimpleNotebookNames().get(DEEPLEARNING))) {
+            return "test_templates/deeplearning/";
+		} else if (notebookName.contains(getSimpleNotebookNames().get(JUPYTER))) {
+            return "test_templates/jupyter/";
+		} else if (notebookName.contains(getSimpleNotebookNames().get(RSTUDIO))) {
+            return "test_templates/rstudio/";
+		} else if (notebookName.contains(getSimpleNotebookNames().get(TENSOR))) {
+            return "test_templates/tensor/";
+		} else if (notebookName.contains(getSimpleNotebookNames().get(ZEPPELIN))) {
+            return "test_templates/zeppelin/";
+        }
+        else return "";
+
+    }
+
+    public static String getNotebookType(String notebookName){
+		if (notebookName.contains(getSimpleNotebookNames().get(DEEPLEARNING))) {
+			return DEEPLEARNING + "/";
+		} else if (notebookName.contains(getSimpleNotebookNames().get(JUPYTER))) {
+			return JUPYTER + "/";
+		} else if (notebookName.contains(getSimpleNotebookNames().get(RSTUDIO))) {
+			return RSTUDIO + "/";
+		} else if (notebookName.contains(getSimpleNotebookNames().get(TENSOR))) {
+			return TENSOR + "/";
+		} else if (notebookName.contains(getSimpleNotebookNames().get(ZEPPELIN))) {
+			return ZEPPELIN + "/";
+        }
+        else return "";
+
+    }
+
+	public static boolean isClusterRequired(String notebookName) {
+		if (notebookName.contains(getSimpleNotebookNames().get(DEEPLEARNING))) {
+			return false;
+		}
+		if (notebookName.contains(getSimpleNotebookNames().get(JUPYTER))
+				|| notebookName.contains(getSimpleNotebookNames().get(RSTUDIO))) {
+			return true;
+		}
+		return !notebookName.contains(getSimpleNotebookNames().get(TENSOR));
+	}
+}
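
A sketch of the naming scheme above; "datalab-it" is a hypothetical service
base name:

    import com.epam.datalab.automation.helper.NamingHelper;

    public class NamingSketch {
        public static void main(String[] args) {
            NamingHelper.setServiceBaseName("datalab-it"); // assignable only once
            System.out.println(NamingHelper.getSsnName()); // -> datalab-it-ssn
            // Short template prefix plus a process-wide counter, e.g. "jup_1".
            System.out.println(NamingHelper.generateRandomValue("jupyter"));
        }
    }
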
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/helper/PropertiesResolver.java b/integration-tests/src/main/java/com/epam/datalab/automation/helper/PropertiesResolver.java
new file mode 100644
index 0000000..eeabe1b
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/helper/PropertiesResolver.java
@@ -0,0 +1,217 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.helper;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Paths;
+import java.util.Properties;
+
+public class PropertiesResolver {
+
+    private static final Logger LOGGER = LogManager.getLogger(PropertiesResolver.class);
+    public static final boolean DEV_MODE;
+	private static final String CONFIG_FILE_NAME = "application.properties";
+	public static final String NOTEBOOK_SCENARIO_FILES_LOCATION_PROPERTY_TEMPLATE = "scenario.%s.files.location";
+	public static final String NOTEBOOK_TEST_TEMPLATES_LOCATION = "%s.test.templates.location";
+	public static final String NOTEBOOK_CONFIGURATION_FILE_TEMPLATE = "%s/%s-notebook.json";
+
+    //keys from application.properties(dev-application.properties)
+	private static final String CONF_FILE_LOCATION_PROPERTY = "conf.file.location";
+	private static final String KEYS_DIRECTORY_LOCATION_PROPERTY = "keys.directory.location";
+	private static final String NOTEBOOK_TEST_DATA_COPY_SCRIPT = "notebook.test.data.copy.script";
+	private static final String NOTEBOOK_TEST_LIB_LOCATION = "notebook.test.lib.location";
+
+	private static final String SCENARIO_JUPYTER_FILES_LOCATION_PROPERTY = "scenario.jupyter.files.location";
+	private static final String SCENARIO_RSTUDIO_FILES_LOCATION_PROPERTY = "scenario.rstudio.files.location";
+	private static final String SCENARIO_ZEPPELIN_FILES_LOCATION_PROPERTY = "scenario.zeppelin.files.location";
+	private static final String SCENARIO_TENSOR_FILES_LOCATION_PROPERTY = "scenario.tensor.files.location";
+	private static final String SCENARIO_DEEPLEARNING_FILES_LOCATION_PROPERTY = "scenario.deeplearning.files.location";
+
+	private static final String JUPYTER_TEST_TEMPLATES_LOCATION_PROPERTY = "jupyter.test.templates.location";
+	private static final String RSTUDIO_TEST_TEMPLATES_LOCATION_PROPERTY = "rstudio.test.templates.location";
+	private static final String ZEPPELIN_TEST_TEMPLATES_LOCATION_PROPERTY = "zeppelin.test.templates.location";
+	private static final String TENSOR_TEST_TEMPLATES_LOCATION_PROPERTY = "tensor.test.templates.location";
+	private static final String DEEPLEARNING_TEST_TEMPLATES_LOCATION_PROPERTY = "deeplearning.test.templates.location";
+
+	private static final String CLUSTER_CONFIG_FILE_LOCATION_PROPERTY = "ec2.config.files.location";
+	private static final String AZURE_CONFIG_FILE_LOCATION_PROPERTY = "azure.config.files.location";
+	private static final String GCP_CONFIG_FILE_LOCATION_PROPERTY = "gcp.config.files.location";
+
+	private PropertiesResolver() {
+	}
+
+    public static String getJupyterTestTemplatesLocationProperty() {
+        return JUPYTER_TEST_TEMPLATES_LOCATION_PROPERTY;
+    }
+
+    public static String getRstudioTestTemplatesLocationProperty() {
+        return RSTUDIO_TEST_TEMPLATES_LOCATION_PROPERTY;
+    }
+
+    public static String getZeppelinTestTemplatesLocationProperty() {
+        return ZEPPELIN_TEST_TEMPLATES_LOCATION_PROPERTY;
+    }
+
+    public static String getTensorTestTemplatesLocationProperty() {
+        return TENSOR_TEST_TEMPLATES_LOCATION_PROPERTY;
+    }
+
+    public static String getDeeplearningTestTemplatesLocationProperty() {
+        return DEEPLEARNING_TEST_TEMPLATES_LOCATION_PROPERTY;
+    }
+
+    private static Properties properties = new Properties();
+
+    static {
+        DEV_MODE = System.getProperty("run.mode", "remote").equalsIgnoreCase("dev");
+        loadApplicationProperties();
+    }
+
+	private static String getProperty(String propertyName, boolean isOptional) {
+		String s = System.getProperty(propertyName, "");
+		if (s.isEmpty() && !isOptional) {
+        	throw new IllegalArgumentException("Missed required JVM argument -D" + propertyName);
+        }
+        return s;
+	}
+	
+	public static void overlapProperty(Properties props, String propertyName, boolean isOptional) {
+		String argName = StringUtils.replaceChars(propertyName, '_', '.').toLowerCase();
+		String s = System.getProperty(argName, "");
+		if (!s.isEmpty()) {
+            props.setProperty(propertyName, s);
+        }
+		if(!isOptional && props.getProperty(propertyName, "").isEmpty()) {
+        	throw new IllegalArgumentException("Missed required argument -D" + argName + " or property " + propertyName);
+        }
+	}
+
+
+    private static String getConfRootPath() {
+    	return getProperty("conf.root.path", false);
+    }
+
+    private static void loadApplicationProperties() {
+        try (InputStream input = PropertiesResolver.class.getClassLoader().getResourceAsStream(CONFIG_FILE_NAME)) {
+            if (input == null) {
+                throw new IOException("Resource " + CONFIG_FILE_NAME + " was not found on the classpath");
+            }
+
+            // load the properties file and expand ${CONF_ROOT_PATH} in every value
+            properties.load(input);
+            String rootPath = getConfRootPath();
+            for (String key : properties.keySet().toArray(new String[0])) {
+            	String path = StringUtils.replace(properties.getProperty(key), "${CONF_ROOT_PATH}", rootPath);
+            	path = Paths.get(path).toAbsolutePath().toString();
+            	properties.setProperty(key, path);
+            }
+            overlapProperty(properties, CONF_FILE_LOCATION_PROPERTY, false);
+
+            // log the resolved locations
+            LOGGER.info(properties.getProperty(CONF_FILE_LOCATION_PROPERTY));
+            LOGGER.info(properties.getProperty(KEYS_DIRECTORY_LOCATION_PROPERTY));
+            LOGGER.info(properties.getProperty(NOTEBOOK_TEST_DATA_COPY_SCRIPT));
+            LOGGER.info(properties.getProperty(NOTEBOOK_TEST_LIB_LOCATION));
+            LOGGER.info(properties.getProperty(SCENARIO_JUPYTER_FILES_LOCATION_PROPERTY));
+            LOGGER.info(properties.getProperty(SCENARIO_RSTUDIO_FILES_LOCATION_PROPERTY));
+            LOGGER.info(properties.getProperty(SCENARIO_ZEPPELIN_FILES_LOCATION_PROPERTY));
+            LOGGER.info(properties.getProperty(SCENARIO_TENSOR_FILES_LOCATION_PROPERTY));
+            LOGGER.info(properties.getProperty(SCENARIO_DEEPLEARNING_FILES_LOCATION_PROPERTY));
+            LOGGER.info(properties.getProperty(JUPYTER_TEST_TEMPLATES_LOCATION_PROPERTY));
+            LOGGER.info(properties.getProperty(RSTUDIO_TEST_TEMPLATES_LOCATION_PROPERTY));
+            LOGGER.info(properties.getProperty(ZEPPELIN_TEST_TEMPLATES_LOCATION_PROPERTY));
+            LOGGER.info(properties.getProperty(TENSOR_TEST_TEMPLATES_LOCATION_PROPERTY));
+            LOGGER.info(properties.getProperty(DEEPLEARNING_TEST_TEMPLATES_LOCATION_PROPERTY));
+            LOGGER.info(properties.getProperty(CLUSTER_CONFIG_FILE_LOCATION_PROPERTY));
+        } catch (IOException ex) {
+            LOGGER.error(ex);
+            LOGGER.error("Application configuration file could not be loaded from the classpath: {}", CONFIG_FILE_NAME);
+            System.exit(1);
+        }
+    }
+
+
+    public static String getConfFileLocation() {
+        return properties.getProperty(CONF_FILE_LOCATION_PROPERTY);
+    }
+
+    public static String getKeysLocation() {
+        return properties.getProperty(KEYS_DIRECTORY_LOCATION_PROPERTY);
+    }
+
+    public static String getNotebookTestDataCopyScriptLocation() {
+        return properties.getProperty(NOTEBOOK_TEST_DATA_COPY_SCRIPT);
+    }
+
+    public static String getNotebookTestLibLocation() {
+        return properties.getProperty(NOTEBOOK_TEST_LIB_LOCATION);
+    }
+
+    public static String getScenarioJupyterFilesLocation() {
+        return properties.getProperty(SCENARIO_JUPYTER_FILES_LOCATION_PROPERTY);
+    }
+
+    public static String getScenarioRstudioFilesLocation() {
+        return properties.getProperty(SCENARIO_RSTUDIO_FILES_LOCATION_PROPERTY);
+    }
+
+    public static String getScenarioZeppelinFilesLocation() {
+        return properties.getProperty(SCENARIO_ZEPPELIN_FILES_LOCATION_PROPERTY);
+    }
+
+    public static String getScenarioTensorFilesLocation() {
+        return properties.getProperty(SCENARIO_TENSOR_FILES_LOCATION_PROPERTY);
+    }
+
+    public static String getScenarioDeeplearningFilesLocation() {
+        return properties.getProperty(SCENARIO_DEEPLEARNING_FILES_LOCATION_PROPERTY);
+    }
+
+    public static String getClusterEC2ConfFileLocation() {
+        return properties.getProperty(CLUSTER_CONFIG_FILE_LOCATION_PROPERTY);
+    }
+
+    public static String getClusterAzureConfFileLocation() {
+        return properties.getProperty(AZURE_CONFIG_FILE_LOCATION_PROPERTY);
+    }
+
+    public static String getClusterGcpConfFileLocation() {
+        return properties.getProperty(GCP_CONFIG_FILE_LOCATION_PROPERTY);
+    }
+
+    public static String getPropertyByName(String propertyName) {
+        return properties.getProperty(propertyName);
+    }
+}
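
A sketch of the property-overlap rule above, i.e. that a JVM argument named
after the property (underscores replaced by dots, lower case) wins over the
value loaded from file. It assumes application.properties and -Dconf.root.path
are available, since the class resolves them in its static initializer:

    import com.epam.datalab.automation.helper.PropertiesResolver;

    import java.util.Properties;

    public class OverlapSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.setProperty("CLUSTER_OS_USERNAME", "from-file");
            // -Dcluster.os.username=..., when set, replaces "from-file";
            // with isOptional=true a missing value is tolerated.
            PropertiesResolver.overlapProperty(props, "CLUSTER_OS_USERNAME", true);
            System.out.println(props.getProperty("CLUSTER_OS_USERNAME"));
        }
    }
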
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/helper/WaitForStatus.java b/integration-tests/src/main/java/com/epam/datalab/automation/helper/WaitForStatus.java
new file mode 100644
index 0000000..bccbef0
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/helper/WaitForStatus.java
@@ -0,0 +1,205 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.helper;
+
+import com.epam.datalab.automation.http.ContentType;
+import com.epam.datalab.automation.http.HttpRequest;
+import com.epam.datalab.automation.http.HttpStatusCode;
+import com.jayway.restassured.path.json.JsonPath;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.time.Duration;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.function.Predicate;
+import java.util.stream.Stream;
+
+public class WaitForStatus {
+
+	private static final Logger LOGGER = LogManager.getLogger(WaitForStatus.class);
+	private static final String EXPLORATORY_PATH = "exploratory";
+
+	private static long getSsnRequestTimeout() {
+		return ConfigPropertyValue.isRunModeLocal() ? 1000 : 10000;
+	}
+
+	private WaitForStatus() {
+	}
+
+	public static boolean selfService(Duration duration) throws InterruptedException {
+		HttpRequest request = new HttpRequest();
+		int actualStatus;
+		long timeout = duration.toMillis();
+		long expiredTime = System.currentTimeMillis() + timeout;
+
+		while ((actualStatus = request.webApiGet(NamingHelper.getSsnURL(), ContentType.TEXT).statusCode()) !=
+				HttpStatusCode.OK) {
+			if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
+				break;
+			}
+			Thread.sleep(getSsnRequestTimeout());
+		}
+
+		if (actualStatus != HttpStatusCode.OK) {
+			LOGGER.info("ERROR: Timeout has been expired for SSN available. Timeout was {}", duration);
+			return false;
+		} else {
+			LOGGER.info("Current status code for SSN is {}", actualStatus);
+		}
+
+		return true;
+	}
+
+	public static int uploadKey(String url, String token, int status, Duration duration)
+			throws InterruptedException {
+		LOGGER.info(" Waiting until status code {} with URL {} with token {}", status, url, token);
+		HttpRequest request = new HttpRequest();
+		int actualStatus;
+		long timeout = duration.toMillis();
+		long expiredTime = System.currentTimeMillis() + timeout;
+
+		while ((actualStatus = request.webApiGet(url, token).getStatusCode()) == status) {
+			if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
+				break;
+			}
+			Thread.sleep(getSsnRequestTimeout());
+		}
+
+		if (actualStatus == status) {
+			LOGGER.info("ERROR: {}: Timeout has been expired for request.");
+			LOGGER.info("  URL is {}", url);
+			LOGGER.info("  token is {}", token);
+			LOGGER.info("  status is {}", status);
+			LOGGER.info("  timeout is {}", duration);
+		} else {
+			LOGGER.info(" Current status code for {} is {}", url, actualStatus);
+		}
+
+		return actualStatus;
+	}
+
+	public static String notebook(String url, String token, String notebookName, String status, Duration duration)
+			throws InterruptedException {
+		LOGGER.info("Waiting for status {} with URL {} with token {} for notebook {}", status, url, token,
+				notebookName);
+		HttpRequest request = new HttpRequest();
+		String actualStatus;
+		long timeout = duration.toMillis();
+		long expiredTime = System.currentTimeMillis() + timeout;
+
+		do {
+			actualStatus = getNotebookStatus(request.webApiGet(url, token)
+					.getBody()
+					.jsonPath(), notebookName);
+			if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
+				break;
+			}
+			Thread.sleep(getSsnRequestTimeout());
+		}
+		while (status.contains(actualStatus));
+
+		if (status.contains(actualStatus)) {
+			LOGGER.info("ERROR: {}: Timeout has been expired for request.", notebookName);
+			LOGGER.info("  {}: URL is {}", notebookName, url);
+			LOGGER.info("  {}: token is {}", notebookName, token);
+			LOGGER.info("  {}: status is {}", notebookName, status);
+			LOGGER.info("  {}: timeout is {}", notebookName, duration);
+		} else {
+			LOGGER.info("{}: Current state for Notebook {} is {}", notebookName, notebookName, actualStatus);
+		}
+
+		return actualStatus;
+	}
+
+	public static String cluster(String url, String token, String notebookName, String computationalName, String
+			status, Duration duration)
+			throws InterruptedException {
+		LOGGER.info("{}: Waiting until status {} with URL {} with token {} for computational {} on notebook {}",
+				notebookName, status, url, token, computationalName, notebookName);
+		HttpRequest request = new HttpRequest();
+		String actualStatus;
+		long timeout = duration.toMillis();
+		long expiredTime = System.currentTimeMillis() + timeout;
+
+		do {
+			actualStatus = getClusterStatus(request.webApiGet(url, token)
+					.getBody()
+					.jsonPath(), notebookName, computationalName);
+			if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
+				break;
+			}
+			Thread.sleep(getSsnRequestTimeout());
+		}
+		while (actualStatus.contains(status));
+
+		if (actualStatus.contains(status)) {
+			LOGGER.info("ERROR: Timeout has been expired for request.");
+			LOGGER.info("  URL is {}", url);
+			LOGGER.info("  token is {}", token);
+			LOGGER.info("  status is {}", status);
+			LOGGER.info("  timeout is {}", duration);
+		} else {
+			LOGGER.info("{}: Current state for cluster {} on notebook is {}", notebookName, computationalName,
+					actualStatus);
+		}
+
+		return actualStatus;
+	}
+
+	@SuppressWarnings("unchecked")
+	public static String getClusterStatus(JsonPath json, String notebookName, String computationalName) {
+		return (String) json.getList(EXPLORATORY_PATH)
+				.stream()
+				.filter(exploratoryNamePredicate(notebookName))
+				.flatMap(computationalResourcesStream())
+				.filter(computationalNamePredicate(computationalName))
+				.map(statusFieldMapper())
+				.findAny()
+				.orElse(StringUtils.EMPTY);
+	}
+
+	private static String getNotebookStatus(JsonPath json, String notebookName) {
+		List<Map<String, String>> notebooks = json.getList(EXPLORATORY_PATH);
+		return notebooks.stream().filter(exploratoryNamePredicate(notebookName))
+				.map(e -> e.get("status"))
+				.findAny()
+				.orElse(StringUtils.EMPTY);
+	}
+
+	private static Function<Object, Object> statusFieldPredicate() {
+		return cr -> (((HashMap) cr).get("status"));
+	}
+
+	private static Predicate<Object> computationalNamePredicate(String computationalName) {
+		return cr -> computationalName.equals(((HashMap) cr).get("computational_name"));
+	}
+
+	private static Function<Object, Stream<?>> computationalResourcesStream() {
+		return d -> ((List) ((HashMap) d).get("computational_resources")).stream();
+	}
+
+	private static Predicate<Object> exploratoryNamePredicate(String notebookName) {
+		return d -> notebookName.equals(((HashMap) d).get("exploratory_name"));
+	}
+}
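
A sketch of polling with the class above; the URL, token and names are
hypothetical placeholders:

    import com.epam.datalab.automation.helper.WaitForStatus;

    import java.time.Duration;

    public class WaitSketch {
        public static void main(String[] args) throws InterruptedException {
            // Polls the self-service endpoint until the notebook leaves the
            // "creating configuring" states or ten minutes pass, returning the
            // last observed state.
            String state = WaitForStatus.notebook(
                    "https://ssn.example.com/api/infrastructure/info",
                    "access-token", "it-notebook", "creating configuring",
                    Duration.ofMinutes(10));
            System.out.println("final state: " + state);
        }
    }
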
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/http/ApiPath.java b/integration-tests/src/main/java/com/epam/datalab/automation/http/ApiPath.java
new file mode 100644
index 0000000..936f464
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/http/ApiPath.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.http;
+
+public class ApiPath {
+
+    public static final String LOGIN = "/api/user/login";
+    public static final String LOGOUT = "/api/user/logout";
+    public static final String UPLOAD_KEY = "/api/user/access_key"; 
+    public static final String AUTHORIZE_USER = "/api/user/authorize";
+    public static final String EXP_ENVIRONMENT = "/api/infrastructure_provision/exploratory_environment";
+    public static final String PROVISIONED_RES = "/api/infrastructure/info";
+    public static final String COMPUTATIONAL_RES = "/api/infrastructure_provision/computational_resources/dataengine-service";
+    public static final String COMPUTATIONAL_RES_SPARK = "/api/infrastructure_provision/computational_resources/dataengine";
+    private static final String STOP_NOTEBOOK = EXP_ENVIRONMENT + "/%s/stop";
+    private static final String TERMINATE_CLUSTER =
+			"/api/infrastructure_provision/computational_resources/%s/%s/terminate";
+	private static final String START_CLUSTER = "/api/infrastructure_provision/computational_resources/%s/%s/start";
+	private static final String STOP_CLUSTER = "/api/infrastructure_provision/computational_resources/%s/%s/stop";
+    private static final String TERMINATE_NOTEBOOK = EXP_ENVIRONMENT + "/%s/terminate";
+    public static final String LIB_GROUPS = "/api/infrastructure_provision/exploratory_environment/lib_groups";
+    public static final String LIB_LIST = "/api/infrastructure_provision/exploratory_environment/search/lib_list";
+    public static final String LIB_INSTALL = "/api/infrastructure_provision/exploratory_environment/lib_install";
+    public static final String LIB_LIST_EXPLORATORY_FORMATTED = "/api/infrastructure_provision/exploratory_environment/lib_list/formatted";
+    public static final String IMAGE_CREATION = "/api/infrastructure_provision/exploratory_environment/image";
+
+    private ApiPath(){}
+
+
+    private static String configureURL(String url, Object... args) {
+        return String.format(url, args);        
+    }
+    
+    public static String getStopNotebookUrl(String notebookName) {
+        return configureURL(STOP_NOTEBOOK, notebookName);
+    }
+    
+    public static String getTerminateClusterUrl(String notebookName, String desName) {
+        return configureURL(TERMINATE_CLUSTER, notebookName, desName);
+    }
+    
+    public static String getTerminateNotebookUrl(String notebookName) {
+        return configureURL(TERMINATE_NOTEBOOK, notebookName);
+    }
+
+	public static String getStartClusterUrl(String notebookName, String desName) {
+		return configureURL(START_CLUSTER, notebookName, desName);
+	}
+
+	public static String getStopClusterUrl(String notebookName, String desName) {
+		return configureURL(STOP_CLUSTER, notebookName, desName);
+	}
+}
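
The URL builders above simply fill the %s placeholders via String.format; for
example, with hypothetical resource names:

    import com.epam.datalab.automation.http.ApiPath;

    public class ApiPathSketch {
        public static void main(String[] args) {
            System.out.println(ApiPath.getStopNotebookUrl("it-notebook"));
            // -> /api/infrastructure_provision/exploratory_environment/it-notebook/stop
            System.out.println(ApiPath.getStartClusterUrl("it-notebook", "it-cluster"));
            // -> /api/infrastructure_provision/computational_resources/it-notebook/it-cluster/start
        }
    }
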
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/http/ContentType.java b/integration-tests/src/main/java/com/epam/datalab/automation/http/ContentType.java
new file mode 100644
index 0000000..93fe54b
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/http/ContentType.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.http;
+
+public class ContentType {
+    public static final String FORMDATA = "multipart/form-data";
+    public static final String JSON = "application/json";
+    public static final String ANY = "*/*";
+    public static final String TEXT = "text/html";
+
+	private ContentType() {
+	}
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/http/HttpRequest.java b/integration-tests/src/main/java/com/epam/datalab/automation/http/HttpRequest.java
new file mode 100644
index 0000000..0c9530d
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/http/HttpRequest.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.http;
+
+import com.epam.datalab.automation.helper.ConfigPropertyValue;
+import com.jayway.restassured.http.ContentType;
+import com.jayway.restassured.response.Response;
+import com.jayway.restassured.specification.RequestSpecification;
+
+import java.io.File;
+import java.util.Map;
+
+import static com.jayway.restassured.RestAssured.given;
+
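+/**
+ * Thin convenience wrapper around RestAssured for the REST calls these tests make.
+ * Minimal usage sketch ("https://ssn.example" and the token are placeholders, not
+ * real values):
+ *
+ *   Response response = new HttpRequest().webApiGet("https://ssn.example/api/user", token);
+ *   int code = response.statusCode(); // compare against HttpStatusCode constants
+ */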
+public class HttpRequest {
+
+	private static final String AUTHORIZATION = "Authorization";
+	private static final String BEARER = "Bearer ";
+
+	private RequestSpecification addHeader(String headerType, String headerValue) {
+		// given() creates a fresh request specification each call; return it so the
+		// header is not silently discarded.
+		return given().header(headerType, headerValue);
+	}
+
+	public RequestSpecification addAuthorizationBearer(String token) {
+		return this.addHeader(AUTHORIZATION, BEARER + token);
+	}
+
+	public Response webApiGet(String url) {
+		return given().contentType(ContentType.JSON).when().get(url);
+	}
+
+	public Response webApiGet(String url, String token) {
+		return given().header(AUTHORIZATION, BEARER + token).contentType(ContentType.JSON).when().get(url);
+	}
+	
+	public Response webApiGet(String url, String token, Map<String, ?> params) {
+		return given().header(AUTHORIZATION, BEARER + token).contentType(ContentType.JSON).params(params).when()
+				.get(url);
+	}
+
+	public Response webApiPost(String url, String contentType, Object body) {
+		return given().contentType(contentType).body(body).when().post(url);
+	}
+
+	public Response webApiPost(String url, String contentType) {
+		return given().contentType(contentType).when().post(url);
+	}
+
+	public Response webApiPost(String url, String contentType, String token) {
+		return given()
+				.contentType(contentType)
+				.header(AUTHORIZATION, BEARER + token)
+				.multiPart(new File(ConfigPropertyValue.getAccessKeyPubFileName()))
+				.formParam(ConfigPropertyValue.getAccessKeyPubFileName())
+				.when()
+				.post(url);
+	}
+
+	public Response webApiPost(String url, String contentType, Object body, String token) {
+		return given().contentType(contentType).header(AUTHORIZATION, BEARER + token).body(body).when().post(url);
+	}
+
+	public Response webApiPut(String url, String contentType, Object body, String token) {
+		return given().contentType(contentType).header(AUTHORIZATION, BEARER + token).body(body).when().put(url);
+	}
+
+	public Response webApiPut(String url, String contentType, String token) {
+		return given().contentType(contentType).header(AUTHORIZATION, BEARER + token).when().put(url);
+	}
+
+	public Response webApiDelete(String url, String contentType, String token) {
+		return given().contentType(contentType).header(AUTHORIZATION, BEARER + token).when().delete(url);
+	}
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/http/HttpStatusCode.java b/integration-tests/src/main/java/com/epam/datalab/automation/http/HttpStatusCode.java
new file mode 100644
index 0000000..e6e5dad
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/http/HttpStatusCode.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.http;
+
+public class HttpStatusCode {
+    
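+    // Subset of standard HTTP status codes (RFC 7231) that the tests assert on.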
+    public static final int OK = 200;
+    public static final int UNAUTHORIZED = 401;
+    public static final int ACCEPTED = 202;
+    public static final int NOT_FOUND = 404;
+
+	private HttpStatusCode() {
+	}
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/jenkins/JenkinsConfigProperties.java b/integration-tests/src/main/java/com/epam/datalab/automation/jenkins/JenkinsConfigProperties.java
new file mode 100644
index 0000000..9240ea2
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/jenkins/JenkinsConfigProperties.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.jenkins;
+
+public class JenkinsConfigProperties {
+
+    static final long JENKINS_REQUEST_TIMEOUT = 5000;
+
+	static final String AUTHORIZATION = "Authorization";
+	static final String AUTHORIZATION_KEY = "Basic %s"; // the placeholder is the Base64-encoded "user:password" pair
+
+	static final String SUCCESS_STATUS = "true";
+	static final String JENKINS_JOB_NAME_SEARCH = "/";
+
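+	// Form body for Jenkins' parameterized-build endpoint: repeated name/value
+	// pairs plus the percent-encoded "json" parameter that the Jenkins build form
+	// also submits (the %7B...%7D run below is that JSON, URL-encoded).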
+	static final String jenkinsJobStartBody = "name=Access_Key_ID&value=%s" +
+            "&name=Secret_Access_Key&value=%s" +
+            "&name=Infrastructure_Tag&value=%s" +
+            "&name=OS_user&value=%s&name=Cloud_provider&value=aws&name=OS_family&value=%s&name=Action&value=create" +
+            "&json=%7B%22parameter" +
+            "%22%3A+%5B%7B%22name%22%3A+%22Access_Key_ID%22%2C+%22value%22%3A+%22%s" +
+            "%22%7D%2C+%7B%22name%22%3A+%22Secret_Access_Key%22%2C+%22value%22%3A+%22%s" +
+            "%22%7D%2C+%7B%22name%22%3A+%22Infrastructure_Tag%22%2C+%22value%22%3A+%22%s" +
+            "%22%7D%2C+%7B%22name%22%3A+%22OS_user%22%2C+%22value%22%3A+%22%s" +
+            "%22%7D%2C+%7B%22name%22%3A+%22Cloud_provider%22%2C+%22value%22%3A+%22aws" +
+            "%22%7D%2C+%7B%22name%22%3A+%22OS_family%22%2C+%22value%22%3A+%22%s" +
+            "%22%7D%2C+%7B%22name%22%3A+%22Action%22%2C+%22value%22%3A+%22create" +
+            "%22%7D%5D%7D&Submit=Build";
+
+	private JenkinsConfigProperties() {
+	}
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/jenkins/JenkinsResponseElements.java b/integration-tests/src/main/java/com/epam/datalab/automation/jenkins/JenkinsResponseElements.java
new file mode 100644
index 0000000..989558e
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/jenkins/JenkinsResponseElements.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.jenkins;
+
+public class JenkinsResponseElements {
+	public static final String IN_QUEUE_ELEMENT = "freeStyleProject.inQueue";
+	public static final String HTML_TITLE = "html.head.title";
+	public static final String RESULT = "result";
+
+	private JenkinsResponseElements() {
+	}
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/jenkins/JenkinsService.java b/integration-tests/src/main/java/com/epam/datalab/automation/jenkins/JenkinsService.java
new file mode 100644
index 0000000..4606f55
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/jenkins/JenkinsService.java
@@ -0,0 +1,225 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.jenkins;
+
+import com.epam.datalab.automation.exceptions.JenkinsException;
+import com.epam.datalab.automation.helper.ConfigPropertyValue;
+import com.epam.datalab.automation.helper.NamingHelper;
+import com.epam.datalab.automation.http.HttpStatusCode;
+import com.jayway.restassured.RestAssured;
+import com.jayway.restassured.authentication.FormAuthConfig;
+import com.jayway.restassured.http.ContentType;
+import com.jayway.restassured.response.Response;
+import com.jayway.restassured.specification.RequestSpecification;
+import org.apache.commons.codec.binary.Base64;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.testng.Assert;
+
+import java.time.Duration;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import static com.jayway.restassured.RestAssured.given;
+
+public class JenkinsService {
+	private static final Logger LOGGER = LogManager.getLogger(JenkinsService.class);
+
+    private final String awsAccessKeyId;
+    private final String awsSecretAccessKey;
+    
+    private String ssnURL;
+    private String serviceBaseName;
+
+	private FormAuthConfig config = new FormAuthConfig(JenkinsConfigProperties.JENKINS_JOB_NAME_SEARCH, "username",
+			"password");
+    
+    public JenkinsService() {
+    	if (!ConfigPropertyValue.isUseJenkins()) {
+    		ssnURL = ConfigPropertyValue.getSsnUrl();
+    		serviceBaseName = ConfigPropertyValue.getServiceBaseName();
+    	}
+        awsAccessKeyId = convertToParam(ConfigPropertyValue.getAwsAccessKeyId());
+        awsSecretAccessKey = convertToParam(ConfigPropertyValue.getAwsSecretAccessKey());
+    }
+    
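+    // AWS secret keys can contain '/', which must be percent-encoded before being
+    // substituted into the form body, e.g. "abc/def" -> "abc%2Fdef" (illustrative).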
+    private String convertToParam(String s) {
+        return s.replace("/", "%2F");
+    }
+    
+    public String getSsnURL() {
+        return ssnURL;
+    }
+
+    public String getServiceBaseName() {
+        return serviceBaseName;
+    }
+    
+    private String getQueueStatus() {
+    	return getWhen(ContentType.XML)
+                .get(JenkinsUrls.API).getBody()
+                .xmlPath()
+                .getString(JenkinsResponseElements.IN_QUEUE_ELEMENT);
+    }
+
+	private void waitForJenkinsStartup(Duration duration) throws InterruptedException {
+    	String actualStatus;
+    	long timeout = duration.toMillis();
+        long expiredTime = System.currentTimeMillis() + timeout;
+        
+    	while ((actualStatus = getQueueStatus()).endsWith(JenkinsConfigProperties.SUCCESS_STATUS)) {
+            Thread.sleep(JenkinsConfigProperties.JENKINS_REQUEST_TIMEOUT);
+            if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
+            	actualStatus = getQueueStatus();
+            	break;
+            }
+        }
+        
+        if (actualStatus.endsWith(JenkinsConfigProperties.SUCCESS_STATUS)) {
+            LOGGER.error("Timeout expired waiting for Jenkins startup; timeout is {} ms", timeout);
+        }
+    }
+
+	public String runJenkinsJob(String jenkinsJobURL) throws InterruptedException {
+    	if (!ConfigPropertyValue.isUseJenkins()) {
+    		return ConfigPropertyValue.getJenkinsBuildNumber();
+    	}
+
+		baseUriInitialize(jenkinsJobURL);
+        String dateAsString = NamingHelper.generateRandomValue();
+        Response responsePostJob = getWhen(ContentType.URLENC)
+				.body(String.format(JenkinsConfigProperties.jenkinsJobStartBody,
+                        awsAccessKeyId, awsSecretAccessKey, dateAsString,
+                        ConfigPropertyValue.getClusterOsUser(), ConfigPropertyValue.getClusterOsFamily(),
+                        awsAccessKeyId, awsSecretAccessKey, dateAsString,
+                        ConfigPropertyValue.getClusterOsUser(), ConfigPropertyValue.getClusterOsFamily()))
+        		.post(jenkinsJobURL + "build");
+        Assert.assertEquals(responsePostJob.statusCode(), HttpStatusCode.OK);
+        
+        waitForJenkinsStartup(ConfigPropertyValue.getTimeoutJenkinsAutotest());
+        
+        setBuildNumber();
+        checkBuildResult();
+        setJenkinsURLServiceBaseName();
+        
+        return ConfigPropertyValue.getJenkinsBuildNumber();
+    }
+
+	public String getJenkinsJob() throws InterruptedException {
+    	if (!ConfigPropertyValue.isUseJenkins()) {
+    		return ConfigPropertyValue.getJenkinsBuildNumber();
+    	}
+
+		baseUriInitialize(ConfigPropertyValue.getJenkinsJobURL());
+
+        setBuildNumber();
+        checkBuildResult();
+        setJenkinsURLServiceBaseName();
+
+        return ConfigPropertyValue.getJenkinsBuildNumber();
+    }
+
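+	// RestAssured.baseURI is a global static: once set here, relative paths such
+	// as JenkinsUrls.API and JenkinsUrls.LAST_BUILD resolve against the job URL.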
+	private static void baseUriInitialize(String value) {
+		RestAssured.baseURI = value;
+	}
+
+	private void setBuildNumber() {
+        if (ConfigPropertyValue.getJenkinsBuildNumber() != null) {
+            LOGGER.info("Jenkins build number is {}", ConfigPropertyValue.getJenkinsBuildNumber());
+        	return;
+    	}
+
+        String buildName = getWhen(ContentType.URLENC)
+                .get(JenkinsUrls.LAST_BUILD).getBody().htmlPath().getString(JenkinsResponseElements.HTML_TITLE);
+        
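+        // Jenkins sets the page title to something like "JobName #42 [Jenkins]";
+        // grab the "#NN" token (the example title is illustrative).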
+        Pattern pattern = Pattern.compile("\\s#\\d+\\s");
+        Matcher matcher = pattern.matcher(buildName);
+        if (matcher.find()) {
+            ConfigPropertyValue.setJenkinsBuildNumber(matcher.group().substring(2).trim());
+        } else {
+            throw new JenkinsException("Jenkins job failed: no build number found in the last build title");
+        }
+        LOGGER.info("Jenkins build number is {}", ConfigPropertyValue.getJenkinsBuildNumber());
+    }
+
+
+	private void checkBuildResult() throws InterruptedException {
+    	String buildResult;
+    	long timeout = ConfigPropertyValue.getTimeoutJenkinsAutotest().toMillis();
+    	long expiredTime = System.currentTimeMillis() + timeout;
+        
+        do {
+        	buildResult = getWhen(ContentType.JSON)
+        			.get(ConfigPropertyValue.getJenkinsBuildNumber() + JenkinsUrls.JSON_PRETTY)
+        			.getBody()
+                    .jsonPath()
+                    .getString(JenkinsResponseElements.RESULT);
+            if (buildResult == null) {
+            	if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
+					throw new JenkinsException("Timeout has been expired for Jenkins build. Timeout is " +
+							ConfigPropertyValue.getTimeoutJenkinsAutotest());
+            	}
+            	Thread.sleep(JenkinsConfigProperties.JENKINS_REQUEST_TIMEOUT);
+            }
+        } while (buildResult == null);
+        
+        if (!"SUCCESS".equals(buildResult)) {
+            throw new JenkinsException("Jenkins job failed: build result is not SUCCESS");
+        }
+    }
+
+	private void setJenkinsURLServiceBaseName() {
+        String jenkinsLog = getWhen(ContentType.TEXT)
+        		.get(ConfigPropertyValue.getJenkinsBuildNumber() + JenkinsUrls.LOG_TEXT)
+        		.getBody()
+                .prettyPrint();
+        Pattern pattern = Pattern.compile("Jenkins URL:(.+)");
+        Matcher matcher = pattern.matcher(jenkinsLog);
+        if (matcher.find()) {
+            ssnURL = matcher.group(1).replaceAll("/jenkins", "");
+        }
+
+        pattern = Pattern.compile("Service base name:(.+)");
+        matcher = pattern.matcher(jenkinsLog);
+        if (matcher.find()) {
+            serviceBaseName = matcher.group(1);
+        } else {
+            throw new JenkinsException("Service base name not found in Jenkins job log");
+        }
+    }
+
+    private RequestSpecification getWhen(ContentType contentType) {
+        return given()
+                .header(JenkinsConfigProperties.AUTHORIZATION,
+						String.format(JenkinsConfigProperties.AUTHORIZATION_KEY, base64CredentialEncode
+								(ConfigPropertyValue.get(ConfigPropertyValue.JENKINS_USERNAME), ConfigPropertyValue
+										.get(ConfigPropertyValue.JENKINS_PASS))))
+        		.auth()
+                .form(ConfigPropertyValue.getJenkinsUsername(), ConfigPropertyValue.getJenkinsPassword(), config)
+        		.contentType(contentType).when();
+    }
+
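+    // Builds the payload for an HTTP Basic "Authorization" header: the
+    // Base64-encoded "user:password" pair.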
+    private static String base64CredentialEncode(String user, String password) {
+        return new String(Base64.encodeBase64(String.format("%s:%s", user, password).getBytes()));
+    }
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/jenkins/JenkinsUrls.java b/integration-tests/src/main/java/com/epam/datalab/automation/jenkins/JenkinsUrls.java
new file mode 100644
index 0000000..3e88c55
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/jenkins/JenkinsUrls.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.jenkins;
+
+public class JenkinsUrls {
+	public static final String API = "api/xml";
+	public static final String LAST_BUILD = "lastBuild";
+	public static final String JSON_PRETTY = "/api/json?pretty=true";
+	public static final String LOG_TEXT = "/logText/progressiveText?start=0";
+
+	private JenkinsUrls() {
+	}
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/model/CreateNotebookDto.java b/integration-tests/src/main/java/com/epam/datalab/automation/model/CreateNotebookDto.java
new file mode 100644
index 0000000..fc309ba
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/model/CreateNotebookDto.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.model;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class CreateNotebookDto {
+    
+	private String image;
+    private String name;
+    @JsonProperty("template_name")
+    private String templateName;
+    private String shape;
+    private String version;
+	@JsonProperty("notebook_image_name")
+	private String imageName;
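+
+	// Serialized JSON shape (all values below are illustrative placeholders):
+	//   {"image":"...", "name":"...", "template_name":"...",
+	//    "shape":"...", "version":"...", "notebook_image_name":"..."}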
+
+	public String getImageName() {
+		return imageName;
+	}
+
+	public void setImageName(String imageName) {
+		this.imageName = imageName;
+	}
+
+	public String getImage() {
+		return image;
+	}
+
+	public void setImage(String image) {
+		this.image = image;
+	}
+
+	public String getName() {
+        return name;
+    }
+    
+    public void setName(String name) {
+        this.name = name;
+    }
+    
+    public String getShape() {
+        return shape;
+    }
+    
+    public void setShape(String shape) {
+        this.shape = shape;
+    }
+    
+	public String getTemplateName() {
+		return templateName;
+	}
+
+	public void setTemplateName(String templateName) {
+		this.templateName = templateName;
+	}
+
+    public String getVersion() {
+        return version;
+    }
+    
+    public void setVersion(String version) {
+        this.version = version;
+    }
+    
+    public CreateNotebookDto(){
+		//This empty constructor is required for proper serialization/deserialization
+    }
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/model/DeployClusterDto.java b/integration-tests/src/main/java/com/epam/datalab/automation/model/DeployClusterDto.java
new file mode 100644
index 0000000..1282972
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/model/DeployClusterDto.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.model;
+
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
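+/**
+ * Base request for deploying a computational cluster next to a notebook;
+ * provider-specific subclasses in this package (DeployEMRDto for AWS EMR,
+ * DeployDataProcDto for GCP Dataproc) add their provider's sizing fields.
+ */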
+public abstract class DeployClusterDto {
+
+	private String image;
+
+	@JsonProperty("template_name")
+	private String templateName;
+	private String name;
+
+	@JsonProperty("notebook_name")
+	private String notebookName;
+
+	public String getImage() {
+		return image;
+	}
+
+	public void setImage(String image) {
+		this.image = image;
+	}
+
+	public String getTemplateName() {
+		return templateName;
+	}
+
+	public void setTemplateName(String templateName) {
+		this.templateName = templateName;
+	}
+
+	public String getName() {
+		return name;
+	}
+
+	public void setName(String name) {
+		this.name = name;
+	}
+
+	public String getNotebookName() {
+		return notebookName;
+	}
+
+	public void setNotebookName(String notebookName) {
+		this.notebookName = notebookName;
+	}
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/model/DeployDataProcDto.java b/integration-tests/src/main/java/com/epam/datalab/automation/model/DeployDataProcDto.java
new file mode 100644
index 0000000..5448a6c
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/model/DeployDataProcDto.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.model;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.MoreObjects;
+
+public class DeployDataProcDto extends DeployClusterDto {
+
+	@JsonProperty("dataproc_master_count")
+	private String dataprocMasterCount;
+
+	@JsonProperty("dataproc_slave_count")
+	private String dataprocSlaveCount;
+
+	@JsonProperty("dataproc_preemptible_count")
+	private String dataprocPreemptibleCount;
+
+	@JsonProperty("dataproc_master_instance_type")
+	private String dataprocMasterInstanceType;
+
+	@JsonProperty("dataproc_slave_instance_type")
+	private String dataprocSlaveInstanceType;
+
+	@JsonProperty("dataproc_version")
+	private String dataprocVersion;
+
+
+	public String getDataprocMasterCount() {
+		return dataprocMasterCount;
+	}
+
+	public void setDataprocMasterCount(String dataprocMasterCount) {
+		this.dataprocMasterCount = dataprocMasterCount;
+	}
+
+	public String getDataprocSlaveCount() {
+		return dataprocSlaveCount;
+	}
+
+	public void setDataprocSlaveCount(String dataprocSlaveCount) {
+		this.dataprocSlaveCount = dataprocSlaveCount;
+	}
+
+	public String getDataprocPreemptibleCount() {
+		return dataprocPreemptibleCount;
+	}
+
+	public void setDataprocPreemptibleCount(String dataprocPreemptibleCount) {
+		this.dataprocPreemptibleCount = dataprocPreemptibleCount;
+	}
+
+	public String getDataprocMasterInstanceType() {
+		return dataprocMasterInstanceType;
+	}
+
+	public void setDataprocMasterInstanceType(String dataprocMasterInstanceType) {
+		this.dataprocMasterInstanceType = dataprocMasterInstanceType;
+	}
+
+	public String getDataprocSlaveInstanceType() {
+		return dataprocSlaveInstanceType;
+	}
+
+	public void setDataprocSlaveInstanceType(String dataprocSlaveInstanceType) {
+		this.dataprocSlaveInstanceType = dataprocSlaveInstanceType;
+	}
+
+	public String getDataprocVersion() {
+		return dataprocVersion;
+	}
+
+	public void setDataprocVersion(String dataprocVersion) {
+		this.dataprocVersion = dataprocVersion;
+	}
+
+	@Override
+	public String toString() {
+		return MoreObjects.toStringHelper(this)
+				.add("image", getImage())
+				.add("template_name", getTemplateName())
+				.add("name", getName())
+				.add("notebook_name", getNotebookName())
+				.add("dataproc_master_count", dataprocMasterCount)
+				.add("dataproc_slave_count", dataprocSlaveCount)
+				.add("dataproc_preemptible_count", dataprocPreemptibleCount)
+				.add("dataproc_master_instance_type", dataprocMasterInstanceType)
+				.add("dataproc_slave_instance_type", dataprocSlaveInstanceType)
+				.add("dataproc_version", dataprocVersion)
+				.toString();
+	}
+}
diff --git a/integration-tests/src/main/java/com/epam/datalab/automation/model/DeployEMRDto.java b/integration-tests/src/main/java/com/epam/datalab/automation/model/DeployEMRDto.java
new file mode 100644
index 0000000..2e43a2b
--- /dev/null
+++ b/integration-tests/src/main/java/com/epam/datalab/automation/model/DeployEMRDto.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.datalab.automation.model;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.MoreObjects;
+
+public class DeployEMRDto extends DeployClusterDto {
+
+	@JsonProperty("emr_instance_count")
+	private String emrInstanceCount;
+
+	@JsonProperty("emr_master_instance_type")
+	private String emrMasterInstanceType;
+
+	@JsonProperty("emr_slave_instance_type")
+	private String emrSlaveInstanceType;
... 3443 lines suppressed ...

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org