Posted to commits@ambari.apache.org by ni...@apache.org on 2017/01/03 11:43:07 UTC

[02/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/AsyncJobRunnerImplTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/AsyncJobRunnerImplTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/AsyncJobRunnerImplTest.java
new file mode 100644
index 0000000..9aac3c2
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/AsyncJobRunnerImplTest.java
@@ -0,0 +1,138 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.hive20;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Props;
+import akka.testkit.JavaTestKit;
+import com.google.common.base.Optional;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.actor.HiveActor;
+import org.apache.ambari.view.hive20.actor.ResultSetIterator;
+import org.apache.ambari.view.hive20.actor.message.Connect;
+import org.apache.ambari.view.hive20.actor.message.ExecuteJob;
+import org.apache.ambari.view.hive20.actor.message.FetchError;
+import org.apache.ambari.view.hive20.actor.message.FetchResult;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.apache.ambari.view.hive20.actor.message.SQLStatementJob;
+import org.apache.ambari.view.hive20.actor.message.job.CancelJob;
+import org.apache.ambari.view.hive20.actor.message.job.Failure;
+import org.apache.ambari.view.hive20.client.AsyncJobRunnerImpl;
+import org.apache.ambari.view.hive20.client.ConnectionConfig;
+import org.apache.ambari.view.hive20.client.NonPersistentCursor;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import org.apache.hive.jdbc.HiveQueryResultSet;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.sql.ResultSet;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
+
+public class AsyncJobRunnerImplTest {
+
+  private ActorSystem actorSystem;
+
+  @Before
+  public void setUp() throws Exception {
+    actorSystem = ActorSystem.create("TestingActorSystem");
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    JavaTestKit.shutdownActorSystem(actorSystem);
+  }
+
+
+  @Test
+  public void testSubmitJob() throws Exception {
+    ConnectionConfig connectionConfig = createNiceMock(ConnectionConfig.class);
+    SQLStatementJob sqlStatementJob = createNiceMock(SQLStatementJob.class);
+    Job job = createNiceMock(Job.class);
+    Connect connect = createNiceMock(Connect.class);
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    ActorRef controller = actorSystem.actorOf(
+            Props.create(TestParent.class));
+    AsyncJobRunnerImpl runner = new AsyncJobRunnerImpl(viewContext, controller, actorSystem);
+    expect(job.getId()).andReturn("1");
+    expect(connect.getJdbcUrl()).andReturn("testjdbc");
+    expect(connectionConfig.createConnectMessage("1")).andReturn(connect);
+    replay(job, connect, connectionConfig);
+    runner.submitJob(connectionConfig, sqlStatementJob, job);
+    verify(job, connectionConfig);
+  }
+
+  @Test
+  public void testCancelJob() throws Exception {
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    ActorRef controller = actorSystem.actorOf(
+            Props.create(TestParent.class));
+    AsyncJobRunnerImpl runner = new AsyncJobRunnerImpl(viewContext, controller, actorSystem);
+    runner.cancelJob("1", "test");
+  }
+
+  @Test
+  public void testGetCursor() throws Exception {
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    ActorRef controller = actorSystem.actorOf(
+            Props.create(TestParent.class));
+    AsyncJobRunnerImpl runner = new AsyncJobRunnerImpl(viewContext, controller, actorSystem);
+    Optional<NonPersistentCursor> cursor = runner.getCursor("1", "test");
+    assertTrue(cursor.isPresent());
+  }
+
+
+  @Test
+  public void testGetError() throws Exception {
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    ActorRef controller = actorSystem.actorOf(
+            Props.create(TestParent.class));
+    AsyncJobRunnerImpl runner = new AsyncJobRunnerImpl(viewContext, controller, actorSystem);
+    Optional<Failure> failure = runner.getError("1", "test");
+    assertTrue(failure.isPresent());
+    assertEquals("failure", failure.get().getMessage());
+  }
+
+  private static class TestParent extends HiveActor {
+
+    @Override
+    public void handleMessage(HiveMessage hiveMessage) {
+      if (hiveMessage.getMessage() instanceof ExecuteJob) {
+        ExecuteJob executeJob = (ExecuteJob) hiveMessage.getMessage();
+        assertEquals(executeJob.getConnect().getJdbcUrl(), "testjdbc");
+      }
+      if (hiveMessage.getMessage() instanceof CancelJob) {
+        CancelJob cancelJob = (CancelJob) hiveMessage.getMessage();
+        assertEquals("1", cancelJob.getJobId());
+        assertEquals("test", cancelJob.getUsername());
+      }
+      if (hiveMessage.getMessage() instanceof FetchError) {
+        sender().tell(Optional.of(new Failure("failure", new NullPointerException())), self());
+      }
+      if (hiveMessage.getMessage() instanceof FetchResult) {
+        ResultSet resultSet = createNiceMock(HiveQueryResultSet.class);
+        ActorRef rsi = context().actorOf(
+                Props.create(ResultSetIterator.class, self(), resultSet));
+        sender().tell(Optional.of(rsi), self());
+      }
+    }
+  }
+}
\ No newline at end of file
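
For reference: the TestParent actor above asserts inside handleMessage, where a failed assertion will not fail the JUnit thread. A minimal alternative sketch (not part of this patch) that surfaces the assertion on the test thread, using JavaTestKit's built-in probe as the controller and assuming cancelJob() simply tells a CancelJob message to it:

  new JavaTestKit(actorSystem) {{
    ViewContext viewContext = createNiceMock(ViewContext.class);
    // getRef() is the built-in probe actor standing in for the controller
    AsyncJobRunnerImpl runner = new AsyncJobRunnerImpl(viewContext, getRef(), actorSystem);
    runner.cancelJob("1", "test");
    // the probe buffers the message so it can be asserted synchronously
    CancelJob cancel = expectMsgClass(CancelJob.class);
    assertEquals("1", cancel.getJobId());
    assertEquals("test", cancel.getUsername());
  }};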

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/ConnectionFailuresTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/ConnectionFailuresTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/ConnectionFailuresTest.java
new file mode 100644
index 0000000..996efd4
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/ConnectionFailuresTest.java
@@ -0,0 +1,157 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Props;
+import akka.testkit.JavaTestKit;
+import com.google.common.base.Optional;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.actor.DeathWatch;
+import org.apache.ambari.view.hive20.actor.OperationController;
+import org.apache.ambari.view.hive20.actor.message.Connect;
+import org.apache.ambari.view.hive20.actor.message.ExecuteJob;
+import org.apache.ambari.view.hive20.actor.message.HiveJob;
+import org.apache.ambari.view.hive20.actor.message.SQLStatementJob;
+import org.apache.ambari.view.hive20.internal.ConnectionSupplier;
+import org.apache.ambari.view.hive20.internal.DataStorageSupplier;
+import org.apache.ambari.view.hive20.internal.HdfsApiSupplier;
+import org.apache.ambari.view.hive20.internal.HiveConnectionWrapper;
+import org.apache.ambari.view.hive20.persistence.Storage;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.hive.jdbc.HiveConnection;
+import org.apache.hive.jdbc.HiveQueryResultSet;
+import org.apache.hive.jdbc.HiveStatement;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.HashMap;
+
+import static org.easymock.EasyMock.*;
+
+public class ConnectionFailuresTest {
+
+  private ActorSystem actorSystem;
+
+  @Before
+  public void setUp() throws Exception {
+    actorSystem = ActorSystem.create("TestingActorSystem");
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    JavaTestKit.shutdownActorSystem(actorSystem);
+  }
+
+  @Test
+  public void testConnectionFailure() throws Exception {
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    ConnectionSupplier connectionSupplier = createNiceMock(ConnectionSupplier.class);
+    DataStorageSupplier dataStorageSupplier = createNiceMock(DataStorageSupplier.class);
+    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
+    HdfsApiSupplier hdfsApiSupplier = createNiceMock(HdfsApiSupplier.class);
+    Connect connect = createNiceMock(Connect.class);
+    Storage storage = createNiceMock(Storage.class);
+    JobImpl jobImpl = createNiceMock(JobImpl.class);
+    ResultSet resultSet = createNiceMock(HiveQueryResultSet.class);
+    HiveStatement statement = createNiceMock(HiveStatement.class);
+    ConnectionDelegate delegate = createNiceMock(ConnectionDelegate.class);
+    HiveConnectionWrapper connectionWrapper = createNiceMock(HiveConnectionWrapper.class);
+    HiveConnection hiveConnection = createNiceMock(HiveConnection.class);
+    HiveJob test = new SQLStatementJob(HiveJob.Type.ASYNC, new String[]{"select * from test"}, "test", "1", "test.log");
+    ExecuteJob executeJob = new ExecuteJob(connect, test);
+    ActorRef deathwatch = actorSystem.actorOf(Props.create(DeathWatch.class));
+    ActorRef operationControl = actorSystem.actorOf(
+            Props.create(OperationController.class, actorSystem, deathwatch, viewContext, connectionSupplier, dataStorageSupplier, hdfsApiSupplier), "operationController-test");
+    expect(hdfsApiSupplier.get(viewContext)).andReturn(Optional.of(hdfsApi));
+    expect(viewContext.getInstanceName()).andReturn("test").anyTimes();
+    expect(viewContext.getProperties()).andReturn(new HashMap<String, String>()).anyTimes();
+    expect(connect.getConnectable(anyObject(AuthParams.class))).andReturn(connectionWrapper);
+    expect(connectionWrapper.isOpen()).andReturn(false).anyTimes();
+    expect(connectionWrapper.getConnection()).andReturn(Optional.<HiveConnection>absent()).anyTimes();
+    expect(dataStorageSupplier.get(viewContext)).andReturn(storage);
+    expect(connectionSupplier.get(viewContext)).andReturn(delegate);
+    expect(storage.load(JobImpl.class, "1")).andReturn(jobImpl).anyTimes();
+    expect(jobImpl.getDateSubmitted()).andReturn(0L).times(1);
+    connectionWrapper.connect();
+    jobImpl.setStatus(Job.JOB_STATE_ERROR);
+    storage.store(JobImpl.class, jobImpl);
+    replay(viewContext, connect, hdfsApiSupplier, dataStorageSupplier, connectionWrapper,
+            storage, jobImpl, connectionSupplier, delegate, statement, resultSet);
+
+    operationControl.tell(executeJob, ActorRef.noSender());
+    Thread.sleep(5000);
+    verify(connect, hdfsApiSupplier, dataStorageSupplier, connectionWrapper,
+            storage, jobImpl, connectionSupplier, delegate, statement, resultSet);
+
+  }
+
+  @Test
+  public void testExecutionFailure() throws Exception {
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    ConnectionSupplier connectionSupplier = createNiceMock(ConnectionSupplier.class);
+    DataStorageSupplier dataStorageSupplier = createNiceMock(DataStorageSupplier.class);
+    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
+    HdfsApiSupplier hdfsApiSupplier = createNiceMock(HdfsApiSupplier.class);
+    Connect connect = createNiceMock(Connect.class);
+    Storage storage = createNiceMock(Storage.class);
+    JobImpl jobImpl = createNiceMock(JobImpl.class);
+    ResultSet resultSet = createNiceMock(HiveQueryResultSet.class);
+    HiveStatement statement = createNiceMock(HiveStatement.class);
+    ConnectionDelegate delegate = createNiceMock(ConnectionDelegate.class);
+    HiveConnectionWrapper connectionWrapper = createNiceMock(HiveConnectionWrapper.class);
+    HiveConnection hiveConnection = createNiceMock(HiveConnection.class);
+    HiveJob test = new SQLStatementJob(HiveJob.Type.ASYNC, new String[]{"select * from test"}, "test", "1", "test.log");
+    ExecuteJob executeJob = new ExecuteJob(connect, test);
+    ActorRef deathwatch = actorSystem.actorOf(Props.create(DeathWatch.class));
+    ActorRef operationControl = actorSystem.actorOf(
+            Props.create(OperationController.class, actorSystem, deathwatch, viewContext, connectionSupplier, dataStorageSupplier, hdfsApiSupplier), "operationController-test");
+    expect(hdfsApiSupplier.get(viewContext)).andReturn(Optional.of(hdfsApi));
+    expect(viewContext.getProperties()).andReturn(new HashMap<String, String>()).anyTimes();
+    expect(connect.getConnectable(anyObject(AuthParams.class))).andReturn(connectionWrapper);
+    expect(connectionWrapper.isOpen()).andReturn(false);
+    expect(connectionWrapper.getConnection()).andReturn(Optional.of(hiveConnection)).anyTimes();
+    expect(dataStorageSupplier.get(viewContext)).andReturn(storage);
+    expect(connectionSupplier.get(viewContext)).andReturn(delegate);
+    expect(storage.load(JobImpl.class, "1")).andReturn(jobImpl).anyTimes();
+    expect(delegate.createStatement(hiveConnection)).andReturn(statement);
+    expect(delegate.execute("select * from test")).andThrow(new SQLException("Syntax error"));
+    expect(jobImpl.getDateSubmitted()).andReturn(0L).times(2);
+    jobImpl.setStatus(Job.JOB_STATE_RUNNING);
+    storage.store(JobImpl.class, jobImpl);
+    connectionWrapper.connect();
+    jobImpl.setStatus(Job.JOB_STATE_ERROR);
+    storage.store(JobImpl.class, jobImpl);
+    replay(viewContext, connect, hdfsApiSupplier, dataStorageSupplier, connectionWrapper,
+            storage, jobImpl, connectionSupplier, delegate, statement, resultSet);
+
+    operationControl.tell(executeJob, ActorRef.noSender());
+    Thread.sleep(5000);
+    verify(connect, hdfsApiSupplier, dataStorageSupplier, connectionWrapper,
+            storage, jobImpl, connectionSupplier, delegate, statement, resultSet);
+  }
+
+
+}
\ No newline at end of file
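
For reference: this test (and JobExecutionTest below) waits with a fixed Thread.sleep(5000) before verifying the mocks. A hedged alternative, sketched here rather than part of the patch, is to poll verify() until it passes or a deadline expires, so the timeout becomes an upper bound instead of a fixed cost; it assumes EasyMock's verify() throws AssertionError while expectations are still unmet:

  // Polls a verification until it succeeds or the deadline passes.
  private static void awaitVerified(Runnable verification, long timeoutMs) throws InterruptedException {
    long deadline = System.currentTimeMillis() + timeoutMs;
    while (true) {
      try {
        verification.run();
        return; // all recorded expectations were satisfied
      } catch (AssertionError e) {
        if (System.currentTimeMillis() > deadline) {
          throw e; // give up and surface the last failure
        }
        Thread.sleep(100);
      }
    }
  }

The Thread.sleep(5000)/verify(...) pair then collapses into one awaitVerified(...) call wrapping the same verify(...) arguments.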

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/HiveJdbcConnectionDelegateTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/HiveJdbcConnectionDelegateTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/HiveJdbcConnectionDelegateTest.java
new file mode 100644
index 0000000..1d43fdc
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/HiveJdbcConnectionDelegateTest.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.hive20.actor.message.GetColumnMetadataJob;
+import org.apache.hive.jdbc.HiveConnection;
+import org.apache.hive.jdbc.HiveQueryResultSet;
+import org.apache.hive.jdbc.HiveStatement;
+import org.junit.Test;
+
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.assertEquals;
+
+public class HiveJdbcConnectionDelegateTest {
+
+
+  @Test
+  public void testCreateStatement() throws SQLException {
+    HiveConnection hiveConnection = createNiceMock(HiveConnection.class);
+    HiveStatement hiveStatement = createNiceMock(HiveStatement.class);
+    expect(hiveConnection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)).andReturn(hiveStatement);
+    replay(hiveConnection);
+    HiveStatement statement = new HiveJdbcConnectionDelegate().createStatement(hiveConnection);
+    assertEquals(hiveStatement, statement);
+
+  }
+
+
+  @Test
+  public void testExecute() throws SQLException {
+    HiveConnection hiveConnection = createNiceMock(HiveConnection.class);
+    HiveStatement hiveStatement = createNiceMock(HiveStatement.class);
+    HiveQueryResultSet resultSet = createNiceMock(HiveQueryResultSet.class);
+    expect(hiveConnection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)).andReturn(hiveStatement);
+    String query = "select * from test";
+    expect(hiveStatement.execute(query)).andReturn(true);
+    expect(hiveStatement.getResultSet()).andReturn(resultSet);
+    replay(hiveConnection, hiveStatement, resultSet);
+    HiveJdbcConnectionDelegate hiveJdbcConnectionDelegate = new HiveJdbcConnectionDelegate();
+    Optional<ResultSet> execute = hiveJdbcConnectionDelegate.execute(hiveConnection, query);
+    assertEquals(execute.get(), resultSet);
+    verify(hiveConnection, hiveStatement, resultSet);
+
+  }
+
+
+  @Test
+  public void testGetColumnMetaData() throws SQLException {
+
+    HiveConnection hiveConnection = createNiceMock(HiveConnection.class);
+    DatabaseMetaData metadata = createNiceMock(DatabaseMetaData.class);
+    expect(hiveConnection.getMetaData()).andReturn(metadata);
+    ResultSet resultSet = createNiceMock(ResultSet.class);
+    expect(metadata.getColumns(anyString(), anyString(), anyString(), anyString())).andReturn(resultSet);
+    replay(hiveConnection, metadata, resultSet);
+    HiveJdbcConnectionDelegate hiveJdbcConnectionDelegate = new HiveJdbcConnectionDelegate();
+    ResultSet columnMetadata = hiveJdbcConnectionDelegate.getColumnMetadata(hiveConnection, new GetColumnMetadataJob("", "", "", ""));
+    assertEquals(resultSet, columnMetadata);
+    verify(hiveConnection, metadata, resultSet);
+  }
+
+
+  @Test
+  public void testCancel() throws SQLException {
+    HiveConnection hiveConnection = createNiceMock(HiveConnection.class);
+    HiveStatement hiveStatement = createNiceMock(HiveStatement.class);
+    HiveQueryResultSet resultSet = createNiceMock(HiveQueryResultSet.class);
+    expect(hiveConnection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)).andReturn(hiveStatement);
+    String query = "select * from test";
+    expect(hiveStatement.execute(query)).andReturn(true);
+    expect(hiveStatement.getResultSet()).andReturn(resultSet);
+    hiveStatement.cancel();
+    resultSet.close();
+    replay(hiveConnection, hiveStatement, resultSet);
+    HiveJdbcConnectionDelegate hiveJdbcConnectionDelegate = new HiveJdbcConnectionDelegate();
+    hiveJdbcConnectionDelegate.execute(hiveConnection, query);
+    hiveJdbcConnectionDelegate.cancel();
+    hiveJdbcConnectionDelegate.closeResultSet();
+    hiveJdbcConnectionDelegate.closeStatement();
+    verify(hiveConnection, hiveStatement, resultSet);
+  }
+
+}
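
For reference: the mocks above pin the delegate to the standard JDBC lifecycle. A plain-JDBC sketch of the same sequence (illustrative only, not the view's API):

  import java.sql.Connection;
  import java.sql.ResultSet;
  import java.sql.SQLException;
  import java.sql.Statement;

  class JdbcLifecycleSketch {
    void run(Connection connection, String query) throws SQLException {
      // scroll-insensitive, read-only statement, matching the expectations above
      Statement stmt = connection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
      try {
        boolean hasResultSet = stmt.execute(query); // true when a ResultSet was produced
        ResultSet rs = hasResultSet ? stmt.getResultSet() : null;
        stmt.cancel(); // best-effort cancel of a running statement, as in testCancel
        if (rs != null) {
          rs.close();
        }
      } finally {
        stmt.close();
      }
    }
  }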

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/JobExecutionTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/JobExecutionTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/JobExecutionTest.java
new file mode 100644
index 0000000..94ebb27
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/JobExecutionTest.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Props;
+import akka.testkit.JavaTestKit;
+import com.beust.jcommander.internal.Lists;
+import com.google.common.base.Optional;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.actor.DeathWatch;
+import org.apache.ambari.view.hive20.actor.OperationController;
+import org.apache.ambari.view.hive20.actor.message.Connect;
+import org.apache.ambari.view.hive20.actor.message.ExecuteJob;
+import org.apache.ambari.view.hive20.actor.message.HiveJob;
+import org.apache.ambari.view.hive20.actor.message.SQLStatementJob;
+import org.apache.ambari.view.hive20.internal.ConnectionSupplier;
+import org.apache.ambari.view.hive20.internal.DataStorageSupplier;
+import org.apache.ambari.view.hive20.internal.HdfsApiSupplier;
+import org.apache.ambari.view.hive20.internal.HiveConnectionWrapper;
+import org.apache.ambari.view.hive20.persistence.Storage;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.hive.jdbc.HiveConnection;
+import org.apache.hive.jdbc.HiveQueryResultSet;
+import org.apache.hive.jdbc.HiveStatement;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.sql.ResultSet;
+import java.util.HashMap;
+
+import static org.easymock.EasyMock.*;
+
+public class JobExecutionTest {
+
+  private ActorSystem actorSystem;
+
+  @Before
+  public void setUp() throws Exception {
+    actorSystem = ActorSystem.create("TestingActorSystem");
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    JavaTestKit.shutdownActorSystem(actorSystem);
+  }
+
+  @Test
+  public void testExecuteJob() throws Exception {
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    ConnectionSupplier connectionSupplier = createNiceMock(ConnectionSupplier.class);
+    DataStorageSupplier dataStorageSupplier = createNiceMock(DataStorageSupplier.class);
+    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
+    HdfsApiSupplier hdfsApiSupplier = createNiceMock(HdfsApiSupplier.class);
+    Connect connect = createNiceMock(Connect.class);
+    Storage storage = createNiceMock(Storage.class);
+    JobImpl jobImpl = createNiceMock(JobImpl.class);
+    ResultSet resultSet = createNiceMock(HiveQueryResultSet.class);
+    HiveStatement statement = createNiceMock(HiveStatement.class);
+    ConnectionDelegate delegate = createNiceMock(ConnectionDelegate.class);
+    HiveConnectionWrapper connectionWrapper = createNiceMock(HiveConnectionWrapper.class);
+    HiveConnection hiveConnection = createNiceMock(HiveConnection.class);
+    HiveJob test = new SQLStatementJob(HiveJob.Type.ASYNC, new String[]{"select * from test"}, "test", "1", "test.log");
+    ExecuteJob executeJob = new ExecuteJob(connect, test);
+    ActorRef deathwatch = actorSystem.actorOf(Props.create(DeathWatch.class));
+    ActorRef operationControl = actorSystem.actorOf(
+            Props.create(OperationController.class, actorSystem, deathwatch, viewContext, connectionSupplier, dataStorageSupplier, hdfsApiSupplier), "operationController-test");
+    expect(hdfsApiSupplier.get(viewContext)).andReturn(Optional.of(hdfsApi));
+    expect(viewContext.getProperties()).andReturn(new HashMap<String, String>()).anyTimes();
+    expect(connect.getConnectable(anyObject(AuthParams.class))).andReturn(connectionWrapper);
+    expect(connectionWrapper.isOpen()).andReturn(false);
+    expect(connectionWrapper.getConnection()).andReturn(Optional.of(hiveConnection)).anyTimes();
+    expect(dataStorageSupplier.get(viewContext)).andReturn(storage);
+    expect(connectionSupplier.get(viewContext)).andReturn(delegate);
+    expect(storage.load(JobImpl.class, "1")).andReturn(jobImpl).anyTimes();
+    expect(delegate.createStatement(hiveConnection)).andReturn(statement);
+    expect(delegate.execute("select * from test")).andReturn(Optional.of(resultSet));
+    expect(statement.getQueryLog()).andReturn(Lists.<String>newArrayList());
+    expect(jobImpl.getDateSubmitted()).andReturn(0L).times(2);
+    jobImpl.setStatus(Job.JOB_STATE_RUNNING);
+    storage.store(JobImpl.class, jobImpl);
+    connectionWrapper.connect();
+    jobImpl.setStatus(Job.JOB_STATE_FINISHED);
+    storage.store(JobImpl.class, jobImpl);
+    replay(viewContext, connect, hdfsApiSupplier, dataStorageSupplier, connectionWrapper,
+            storage, jobImpl, connectionSupplier, delegate, statement, resultSet);
+
+    operationControl.tell(executeJob, ActorRef.noSender());
+    Thread.sleep(5000);
+    verify(connect, hdfsApiSupplier, dataStorageSupplier, connectionWrapper,
+            storage, jobImpl, connectionSupplier, delegate, statement, resultSet);
+
+  }
+
+
+}
\ No newline at end of file
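
For reference: all of these tests follow the same EasyMock record/replay/verify lifecycle. A minimal standalone illustration (not part of the patch), using the same Job mock seen above:

  Job job = createNiceMock(Job.class);   // nice mock: unstubbed calls return defaults instead of failing
  expect(job.getId()).andReturn("1");    // record an expectation
  replay(job);                           // switch from record mode to replay mode
  assertEquals("1", job.getId());        // exercise the mock
  verify(job);                           // fail if a recorded expectation was never met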

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/PropertyValidatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/PropertyValidatorTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/PropertyValidatorTest.java
new file mode 100644
index 0000000..d9c4b2a
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/PropertyValidatorTest.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import org.apache.ambari.view.ViewInstanceDefinition;
+import org.apache.ambari.view.validation.Validator;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class PropertyValidatorTest {
+
+  @Test
+  public void testValidatePropertyWebHDFSCom() throws Exception {
+    PropertyValidator validator = new PropertyValidator();
+    ViewInstanceDefinition definition = getViewInstanceDefinition();
+
+    definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "hdfs://hostname.com:8020");
+
+    assertTrue(validator.validateProperty(PropertyValidator.WEBHDFS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+
+    definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "webhdfs://hostname.com:50070");
+
+    assertTrue(validator.validateProperty(PropertyValidator.WEBHDFS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+
+    definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "http://hostname.com:50070");
+
+    assertFalse(validator.validateProperty(PropertyValidator.WEBHDFS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+  }
+
+  @Test
+  public void testValidatePropertyWebHDFSInternal() throws Exception {
+    PropertyValidator validator = new PropertyValidator();
+    ViewInstanceDefinition definition = getViewInstanceDefinition();
+
+    definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "hdfs://hostname.internal:8020");
+
+    assertTrue(validator.validateProperty(PropertyValidator.WEBHDFS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+
+    definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "webhdfs://hostname.internal:50070");
+
+    assertTrue(validator.validateProperty(PropertyValidator.WEBHDFS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+
+    definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "swebhdfs://hostname.internal:50070");
+
+    assertTrue(validator.validateProperty(PropertyValidator.WEBHDFS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+
+    definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "http://hostname.internal:50070");
+
+    assertFalse(validator.validateProperty(PropertyValidator.WEBHDFS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+  }
+
+  @Test
+  public void testValidatePropertyATSCom() throws Exception {
+    PropertyValidator validator = new PropertyValidator();
+    ViewInstanceDefinition definition = getViewInstanceDefinition();
+
+    definition.getPropertyMap().put(PropertyValidator.YARN_ATS_URL, "http://hostname.com:8088");
+
+    assertTrue(validator.validateProperty(PropertyValidator.YARN_ATS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+  }
+
+  @Test
+  public void testValidatePropertyATSInternal() throws Exception {
+    PropertyValidator validator = new PropertyValidator();
+    ViewInstanceDefinition definition = getViewInstanceDefinition();
+
+    definition.getPropertyMap().put(PropertyValidator.YARN_ATS_URL, "http://hostname.internal:8088");
+
+    assertTrue(validator.validateProperty(PropertyValidator.YARN_ATS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+  }
+
+  private ViewInstanceDefinition getViewInstanceDefinition() {
+    ViewInstanceDefinition definition = EasyMock.createNiceMock(ViewInstanceDefinition.class);
+    expect(definition.getClusterHandle()).andReturn(null).anyTimes();
+    Map<String, String> properties = new HashMap<String, String>();
+    expect(definition.getPropertyMap()).andReturn(properties).anyTimes();
+    replay(definition);
+    return definition;
+  }
+}
\ No newline at end of file
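
For reference: the WebHDFS cases above accept hdfs://, webhdfs:// and swebhdfs:// URLs and reject http://. A hypothetical scheme check consistent with those expectations (the real PropertyValidator may differ):

  import java.net.URI;
  import java.net.URISyntaxException;
  import java.util.Arrays;
  import java.util.HashSet;
  import java.util.Set;

  class WebHdfsUrlCheckSketch {
    private static final Set<String> VALID_SCHEMES =
        new HashSet<String>(Arrays.asList("hdfs", "webhdfs", "swebhdfs"));

    static boolean isValidWebHdfsUrl(String url) {
      try {
        String scheme = new URI(url).getScheme();
        return scheme != null && VALID_SCHEMES.contains(scheme.toLowerCase());
      } catch (URISyntaxException e) {
        return false; // unparseable URLs fail validation
      }
    }
  }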

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/ResultSetIteratorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/ResultSetIteratorTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/ResultSetIteratorTest.java
new file mode 100644
index 0000000..f7db199
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/ResultSetIteratorTest.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.hive20;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Props;
+import akka.testkit.JavaTestKit;
+import org.apache.ambari.view.hive20.actor.HiveActor;
+import org.apache.ambari.view.hive20.actor.ResultSetIterator;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.apache.ambari.view.hive20.actor.message.ResetCursor;
+import org.apache.ambari.view.hive20.actor.message.job.Next;
+import org.apache.hive.jdbc.HiveQueryResultSet;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+
+import static org.easymock.EasyMock.*;
+
+
+public class ResultSetIteratorTest {
+
+  private ActorSystem actorSystem;
+
+  @Before
+  public void setUp() throws Exception {
+    actorSystem = ActorSystem.create("TestingActorSystem");
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    JavaTestKit.shutdownActorSystem(actorSystem);
+  }
+
+  @Test
+  public void testGetNext() throws Exception {
+    ResultSet resultSet = createNiceMock(HiveQueryResultSet.class);
+    ResultSetMetaData resultSetMetaData = createNiceMock(ResultSetMetaData.class);
+
+    ActorRef parent = actorSystem.actorOf(
+            Props.create(TestParent.class));
+    ActorRef rsi = actorSystem.actorOf(
+            Props.create(ResultSetIterator.class, parent, resultSet));
+    expect(resultSet.getMetaData()).andReturn(resultSetMetaData);
+    expect(resultSetMetaData.getColumnCount()).andReturn(1);
+    expect(resultSetMetaData.getColumnName(1)).andReturn("test");
+    expect(resultSetMetaData.getColumnTypeName(1)).andReturn("string");
+    replay(resultSet, resultSetMetaData);
+    rsi.tell(new Next(), parent);
+    Thread.sleep(2000);
+    verify(resultSet, resultSetMetaData);
+
+  }
+
+  @Test
+  public void testResetCursor() throws Exception {
+    ResultSet resultSet = createNiceMock(HiveQueryResultSet.class);
+
+    ActorRef parent = actorSystem.actorOf(
+            Props.create(TestParent.class));
+    ActorRef rsi = actorSystem.actorOf(
+            Props.create(ResultSetIterator.class, parent, resultSet));
+    resultSet.beforeFirst();
+    replay(resultSet);
+    rsi.tell(new ResetCursor(), parent);
+    Thread.sleep(2000);
+    verify(resultSet);
+
+  }
+
+
+  private static class TestParent extends HiveActor {
+
+    @Override
+    public void handleMessage(HiveMessage hiveMessage) {
+
+    }
+  }
+
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/parsers/ParserUtilsTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/parsers/ParserUtilsTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/parsers/ParserUtilsTest.java
new file mode 100644
index 0000000..86170e7
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/parsers/ParserUtilsTest.java
@@ -0,0 +1,66 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.apache.ambari.view.hive20.internal.parsers;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.List;
+
+public class ParserUtilsTest {
+
+  @Test
+  public void parseColumnDataTypeDecimalTest(){
+    String columnDataTypeString = " decimal(10,2) ";
+    List<String> list = ParserUtils.parseColumnDataType(columnDataTypeString);
+    Assert.assertEquals("Must contain 3 elements : ", 3, list.size());
+    Assert.assertEquals("Failed to find datatype. ", "decimal", list.get(0));
+    Assert.assertEquals("Failed to find precision. ", "10", list.get(1));
+    Assert.assertEquals("Failed to find scale. ", "2", list.get(2));
+  }
+
+  @Test
+  public void parseColumnDataTypeDecimalWithSpaceTest(){
+    String columnDataTypeString = " decimal ( 10 ,   2 ) ";
+    List<String> list = ParserUtils.parseColumnDataType(columnDataTypeString);
+    Assert.assertEquals("Must contain 3 elements : ", 3, list.size());
+    Assert.assertEquals("Failed to find datatype. ", "decimal", list.get(0));
+    Assert.assertEquals("Failed to find precision. ", "10", list.get(1));
+    Assert.assertEquals("Failed to find scale. ", "2", list.get(2));
+  }
+
+  @Test
+  public void parseColumnDataTypeVarcharTest(){
+    String columnDataTypeString = " VARCHAR( 10)  ";
+    List<String> list = ParserUtils.parseColumnDataType(columnDataTypeString);
+    Assert.assertEquals("Must contain 2 elements : ", 3, list.size());
+    Assert.assertEquals("Failed to find datatype. ", "VARCHAR", list.get(0));
+    Assert.assertEquals("Failed to find precision. ", "10", list.get(1));
+    Assert.assertNull("Scale should be null. ", list.get(2));
+  }
+
+  @Test
+  public void parseColumnDataTypeBooleanTest(){
+    String columnDataTypeString = " BOOLEAN  ";
+    List<String> list = ParserUtils.parseColumnDataType(columnDataTypeString);
+    Assert.assertEquals("Must contain 1 elements : ", 3, list.size());
+    Assert.assertEquals("Failed to find datatype. ", "BOOLEAN", list.get(0));
+    Assert.assertNull("Precision should be null. ", list.get(1));
+    Assert.assertNull("Scale should be null. ", list.get(2));
+  }
+}
\ No newline at end of file
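
For reference: the four cases above pin down the contract — parseColumnDataType always yields a 3-element [type, precision, scale] list with nulls for the missing parts, tolerating arbitrary whitespace. A hypothetical regex-based implementation that satisfies exactly these tests (the patch's actual code may differ):

  import java.util.ArrayList;
  import java.util.List;
  import java.util.regex.Matcher;
  import java.util.regex.Pattern;

  class ParserUtilsSketch {
    private static final Pattern TYPE_PATTERN =
        Pattern.compile("\\s*(\\w+)\\s*(?:\\(\\s*(\\d+)\\s*(?:,\\s*(\\d+)\\s*)?\\))?\\s*");

    static List<String> parseColumnDataType(String columnDataType) {
      List<String> parts = new ArrayList<String>(3);
      Matcher m = TYPE_PATTERN.matcher(columnDataType);
      if (m.matches()) {
        parts.add(m.group(1)); // datatype, e.g. "decimal"
        parts.add(m.group(2)); // precision, or null for plain types like BOOLEAN
        parts.add(m.group(3)); // scale, or null when only precision is given
      }
      return parts;
    }
  }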

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGenerationSpecTest.groovy
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGenerationSpecTest.groovy b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGenerationSpecTest.groovy
new file mode 100644
index 0000000..874e268
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGenerationSpecTest.groovy
@@ -0,0 +1,59 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+
+package org.apache.ambari.view.hive20.internal.query.generators
+
+import spock.lang.Specification
+
+class AlterTableQueryGenerationSpecTest extends Specification {
+//  def "alter simple table"() {
+//    // blocks go here
+//    setup:
+//    def oldTableMeta = new TableMeta()
+//    def newTableMeta = new TableMeta()
+//    def oldCols = new ArrayList<>();
+//    oldCols.add(new ColumnInfo())
+//    oldTableMeta.setColumns()
+//
+//    when:
+//    stack.push(elem)
+//
+//    then:
+//    println "inside AlterTableQueryGenerationSpecTest"
+//    !stack.empty
+//    stack.size() == 1
+//    stack.peek() == elem
+//  }
+//
+//  def "pushing again an element on the stack"() {
+//    // blocks go here
+//    setup:
+//    def stack = new Stack()
+//    def elem = "push me"
+//
+//    when:
+//    stack.push(elem)
+//
+//    then:
+//    println "inside AlterTableQueryGenerationSpecTest"
+//    !stack.empty
+//    stack.size() == 1
+//    stack.peek() == elem
+//  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGeneratorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGeneratorTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGeneratorTest.java
new file mode 100644
index 0000000..45f29da
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGeneratorTest.java
@@ -0,0 +1,94 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.internal.query.generators;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
+import org.apache.ambari.view.hive20.internal.dto.TableMeta;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class AlterTableQueryGeneratorTest {
+  @Test
+  public void getQuery() throws Exception {
+
+  }
+
+  @Test
+  public void generateColumnQuery() throws Exception {
+
+  }
+
+  @Test
+  public void createColumnQueriesForSuccessfulChangeColumn() throws Exception {
+    ColumnInfo colInfo1 = new ColumnInfo("col1", "CHAR(1)", "COMMENT 1"); // with comment
+    ColumnInfo colInfo2 = new ColumnInfo("col2", "DECIMAL(10,5)"); // no comment
+    ColumnInfo colInfo3 = new ColumnInfo("col3", "STRING", "COMMENT-3");
+    ColumnInfo colInfo4 = new ColumnInfo("col4", "VARCHAR(10)", "COMMENT 4");
+    ColumnInfo colInfo5 = new ColumnInfo("col5", "STRING", "COMMENT 5");
+    ColumnInfo colInfo6 = new ColumnInfo("col6", "INT");
+    List<ColumnInfo> oldColumns = Arrays.asList(colInfo1, colInfo2, colInfo3);
+    List<ColumnInfo> newColumns = Arrays.asList(colInfo4, colInfo5, colInfo6); // all changed
+    Optional<List<String>> query = AlterTableQueryGenerator.createColumnQueries(oldColumns, newColumns, false);
+
+    Assert.assertTrue(query.isPresent());
+    List<String> queries = query.get();
+
+    Assert.assertEquals("Expected number of column update queries were different.", 3, queries.size());
+    String[] expectedQueries = new String[]{" CHANGE COLUMN `col1` `col4` VARCHAR(10) COMMENT \'COMMENT 4\'", " CHANGE COLUMN `col2` `col5` STRING COMMENT \'COMMENT 5\'", " CHANGE COLUMN `col3` `col6` INT"};
+
+    Assert.assertArrayEquals("Column change queries were not equal ", expectedQueries, queries.toArray());
+  }
+
+  @Test
+  public void createColumnQueriesForSuccessfulChangeAndAddColumn() throws Exception {
+
+    TableMeta oldMeta = new TableMeta();
+    TableMeta newMeta = new TableMeta();
+
+    ColumnInfo colInfo1 = new ColumnInfo("col1", "CHAR(1)", "COMMENT 1"); // with comment
+    ColumnInfo colInfo2 = new ColumnInfo("col2", "DECIMAL(10,5)"); // no comment
+    ColumnInfo colInfo3 = new ColumnInfo("col3", "STRING", "COMMENT-3");
+    ColumnInfo colInfo4 = new ColumnInfo("col4", "VARCHAR(10)", "COMMENT 4");
+    ColumnInfo colInfo5 = new ColumnInfo("col5", "STRING", "COMMENT 5");
+    ColumnInfo colInfo6 = new ColumnInfo("col6", "INT");
+    ColumnInfo colInfo7 = new ColumnInfo("col7", "DATE");
+    ColumnInfo colInfo8 = new ColumnInfo("col8", "BOOLEAN", "COMMENT 8");
+
+    List<ColumnInfo> oldColumns = Arrays.asList(colInfo1, colInfo2, colInfo3);
+    oldMeta.setColumns(oldColumns);
+
+    List<ColumnInfo> newColumns = Arrays.asList(colInfo4, colInfo5, colInfo6, colInfo7, colInfo8); // all changed
+    oldMeta.setColumns(newColumns);
+
+    Optional<List<String>> query = AlterTableQueryGenerator.createColumnQueries(oldColumns, newColumns, false);
+
+    Assert.assertTrue(query.isPresent());
+    List<String> queries = query.get();
+
+    Assert.assertEquals("Expected number of column update queries were different.", 4, queries.size());
+    System.out.println(queries);
+    String[] expectedQueries = new String[]{" CHANGE COLUMN `col1` `col4` VARCHAR(10) COMMENT \'COMMENT 4\'", " CHANGE COLUMN `col2` `col5` STRING COMMENT \'COMMENT 5\'", " CHANGE COLUMN `col3` `col6` INT"," ADD COLUMNS ( `col7` DATE, `col8` BOOLEAN COMMENT \'COMMENT 8\' )" };
+
+    Assert.assertArrayEquals("Column change queries were not equal ", expectedQueries, queries.toArray());
+  }
+}
\ No newline at end of file
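
For reference: the expected strings fix the shape of each CHANGE COLUMN clause. A sketch of the clause builder they imply (hypothetical; ColumnInfo's getName/getType/getComment accessors are assumed):

  static String changeColumnClause(ColumnInfo oldColumn, ColumnInfo newColumn) {
    StringBuilder clause = new StringBuilder(" CHANGE COLUMN `")
        .append(oldColumn.getName()).append("` `")
        .append(newColumn.getName()).append("` ")
        .append(newColumn.getType());
    if (newColumn.getComment() != null) {
      // comments are emitted single-quoted, e.g. COMMENT 'COMMENT 4'
      clause.append(" COMMENT '").append(newColumn.getComment()).append("'");
    }
    return clause.toString();
  }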

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/CreateTableQueryGeneratorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/CreateTableQueryGeneratorTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/CreateTableQueryGeneratorTest.java
new file mode 100644
index 0000000..f5a9778
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/CreateTableQueryGeneratorTest.java
@@ -0,0 +1,101 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.apache.ambari.view.hive20.internal.query.generators;
+
+import com.google.common.base.Optional;
+import com.google.gson.Gson;
+import org.apache.ambari.view.hive20.internal.dto.TableMeta;
+import org.junit.Assert;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class CreateTableQueryGeneratorTest {
+  private static final Logger LOG = LoggerFactory.getLogger(CreateTableQueryGeneratorTest.class);
+
+  @Test
+  public void testGetQuery() throws Exception {
+    String json = "{\n" +
+      "\t\"id\": \"d1/t2\",\n" +
+      "\t\"database\": \"d1\",\n" +
+      "\t\"table\": \"t2\",\n" +
+      "\t\"columns\": [{\n" +
+      "\t\t\"name\": \"col_name1\",\n" +
+      "\t\t\"type\": \"string\",\n" +
+      "\t\t\"comment\": \"col_name1 comment\"\n" +
+      "\t}, {\n" +
+      "\t\t\"name\": \"col_name2\",\n" +
+      "\t\t\"type\": \"decimal(10,2)\",\n" +
+      "\t\t\"comment\": \"col_name2 comment\"\n" +
+      "\t}],\n" +
+      "\t\"ddl\": \"CREATE TABLE `t2`(\\n  `col_name1` string COMMENT \\u0027col_name1 comment\\u0027, \\n  `col_name2` decimal(10,2) COMMENT \\u0027col_name2 comment\\u0027)\\nCOMMENT \\u0027table t1 comment\\u0027\\nPARTITIONED BY ( \\n  `col_name3` string COMMENT \\u0027col_name3 comment\\u0027, \\n  `col_name4` char(1) COMMENT \\u0027col_name4 comment\\u0027)\\nCLUSTERED BY ( \\n  col_name1, \\n  col_name2) \\nSORTED BY ( \\n  col_name1 ASC, \\n  col_name2 DESC) \\nINTO 5 BUCKETS\\nROW FORMAT DELIMITED \\n  FIELDS TERMINATED BY \\u0027,\\u0027 \\nWITH SERDEPROPERTIES ( \\n  \\u0027escape.delim\\u0027\\u003d\\u0027\\\\\\\\\\u0027) \\nSTORED AS INPUTFORMAT \\n  \\u0027org.apache.hadoop.mapred.SequenceFileInputFormat\\u0027 \\nOUTPUTFORMAT \\n  \\u0027org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat\\u0027\\nLOCATION\\n  \\u0027hdfs://c6401.ambari.apache.org:8020/user/hive/tables/d1/t1\\u0027\\nTBLPROPERTIES (\\n  \\u0027NO_AUTO_COMPACTION\\u0027\\u003d\\u0027true\\u0027, 
 \\n  \\u0027immutable\\u0027\\u003d\\u0027false\\u0027, \\n  \\u0027orc.compress\\u0027\\u003d\\u0027SNAPPY\\u0027, \\n  \\u0027transient_lastDdlTime\\u0027\\u003d\\u00271481520077\\u0027)\\n\",\n" +
+      "\t\"partitionInfo\": {\n" +
+      "\t\t\"columns\": [{\n" +
+      "\t\t\t\"name\": \"col_name4\",\n" +
+      "\t\t\t\"type\": \"char(1)\",\n" +
+      "\t\t\t\"comment\": \"col_name4 comment\"\n" +
+      "\t\t}, {\n" +
+      "\t\t\t\"name\": \"col_name3\",\n" +
+      "\t\t\t\"type\": \"string\",\n" +
+      "\t\t\t\"comment\": \"col_name3 comment\"\n" +
+      "\t\t}]\n" +
+      "\t},\n" +
+      "\t\"detailedInfo\": {\n" +
+      "\t\t\"dbName\": \"d1\",\n" +
+      "\t\t\"owner\": \"admin\",\n" +
+      "\t\t\"createTime\": \"Mon Dec 12 05:21:17 UTC 2016\",\n" +
+      "\t\t\"lastAccessTime\": \"UNKNOWN\",\n" +
+      "\t\t\"retention\": \"0\",\n" +
+      "\t\t\"tableType\": \"MANAGED_TABLE\",\n" +
+      "\t\t\"location\": \"hdfs://c6401.ambari.apache.org:8020/user/hive/tables/d1/t1\",\n" +
+      "\t\t\"parameters\": {\n" +
+      "\t\t\t\"immutable\": \"false\",\n" +
+      "\t\t\t\"orc.compress\": \"SNAPPY\",\n" +
+      "\t\t\t\"transient_lastDdlTime\": \"1481520077\",\n" +
+      "\t\t\t\"NO_AUTO_COMPACTION\": \"true\",\n" +
+      "\t\t\t\"comment\": \"table t1 comment\",\n" +
+      "\t\t\t\"SORTBUCKETCOLSPREFIX\": \"TRUE\"\n" +
+      "\t\t}\n" +
+      "\t},\n" +
+      "\t\"storageInfo\": {\n" +
+      "\t\t\"serdeLibrary\": \"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe\",\n" +
+      "\t\t\"inputFormat\": \"org.apache.hadoop.mapred.SequenceFileInputFormat\",\n" +
+      "\t\t\"outputFormat\": \"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat\",\n" +
+      "\t\t\"compressed\": \"No\",\n" +
+      "\t\t\"numBuckets\": \"5\",\n" +
+      "\t\t\"bucketCols\": [\"col_name1\", \" col_name2\"],\n" +
+      "\t\t\"sortCols\": [{\n" +
+      "\t\t\t\"columnName\": \"col_name1\",\n" +
+      "\t\t\t\"order\": \"ASC\"\n" +
+      "\t\t}, {\n" +
+      "\t\t\t\"columnName\": \"col_name2\",\n" +
+      "\t\t\t\"order\": \"DESC\"\n" +
+      "\t\t}],\n" +
+      "\t\t\"parameters\": {\n" +
+      "\t\t\t\"escape.delim\": \"\\\\\\\\\",\n" +
+      "\t\t\t\"field.delim\": \",\",\n" +
+      "\t\t\t\"serialization.format\": \",\"\n" +
+      "\t\t}\n" +
+      "\t}\n" +
+      "}";
+    TableMeta tableMeta = new Gson().fromJson(json, TableMeta.class);
+    Optional<String> createQuery = new CreateTableQueryGenerator(tableMeta).getQuery();
+    LOG.info("createQuery : {}", createQuery);
+    Assert.assertTrue(createQuery.isPresent());
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/CSVParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/CSVParserTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/CSVParserTest.java
new file mode 100644
index 0000000..8f0ac47
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/CSVParserTest.java
@@ -0,0 +1,275 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.upload;
+
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.csv.commonscsv.CSVParser;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+
+public class CSVParserTest {
+
+  /**
+   * No exception should occur when creating the CSVParser from an empty stream.
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyStream() throws Exception {
+    String csv = "";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, new ParseOptions());
+      ) {
+      Assert.assertEquals("There should not be any rows.",false, jp.iterator().hasNext());
+    }
+  }
+
+  /**
+   * In CSV, a line containing only whitespace is still considered a row.
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyRow() throws Exception {
+    String csv = "       ";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, new ParseOptions());
+      ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should be Empty", true, iterator.hasNext());
+      Assert.assertArrayEquals("Row should not be empty",new Object[]{"       "},iterator.next().getRow());
+    }
+  }
+
+  @Test
+  public void testParse1Row() throws Exception {
+    String csv = "value1,c,10,10.1";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, new ParseOptions());
+      ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", true, iterator.hasNext());
+      Row row = iterator.next();
+      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
+      Assert.assertEquals("Row not equal!", expected, row);
+
+      Assert.assertEquals("Should report no more rows!", false, iterator.hasNext());
+    }
+  }
+
+  @Test
+  public void testParseMultipleRow() throws Exception {
+
+    String csv = "value1,c,10,10.1\n" +
+            "value2,c2,102,true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, new ParseOptions());
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", new Row(new Object[]{"value1", "c", "10", "10.1"}), iterator.next());
+
+      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 2nd row!", new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), iterator.next());
+
+      Assert.assertEquals("Failed to detect end of rows!", false, iterator.hasNext());
+      Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
+    }
+  }
+
+
+  @Test
+  public void testQuotedEndline() throws Exception {
+
+    String csv = "\"row1-\ncol1\",1,1.1\n\"row2-\\\ncol1\",2,2.2\n";
+    ParseOptions po = new ParseOptions();
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"row1-\ncol1", "1", "1.1"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+
+      Row row2 = new Row(new Object[]{"row2-\\\ncol1", "2", "2.2"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row2, iterator.next());
+
+    }
+  }
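+
+  // Note, derived from the expectations above: with default options the backslash in
+  // row 2 survives verbatim. It is the quoting, not any escape handling, that lets
+  // the embedded newline remain part of the field.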
+
+  @Test
+  public void testQuotedDoubleQuote() throws Exception {
+
+    String csv = "\"aaa\",\"b\"\"bb\",\"ccc\"";
+    ParseOptions po = new ParseOptions();
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void testSpecialEscape() throws Exception {
+
+    String csv = "\"aaa\",\"b$\"bb\",\"ccc\"";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR, '$');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void testSpecialEscapedEscape() throws Exception {
+
+    String csv = "aaa,b$$bb,ccc";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR, '$');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b$bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void test001Escape() throws Exception {
+
+    String csv = "aaa,b\001\"bb,ccc";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR, '\001');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());    }
+  }
+
+  @Test
+  public void testSpecialQuote() throws Exception {
+
+    String csv = "\001aaa\001,\001b\001\001bb\001,\001ccc\001";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_QUOTE, '\001');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b\001bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void testSpaceAsDelimiterAndQuoted() throws Exception {
+
+    String csv = "aaa \"b bb\" ccc\naaa2 bbb2 \"c cc2\"";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_DELIMITER, ' ');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+
+      Row row2 = new Row(new Object[]{"aaa2", "bbb2", "c cc2"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row2, iterator.next());
+    }
+  }
+
+  @Test
+  public void testFailedDelimiterEscaped() throws Exception {
+
+    String csv = "aaa,b\\,bb,ccc";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR, '\\');
+    po.setOption(ParseOptions.OPTIONS_CSV_DELIMITER, ',');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b,bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
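+
+  /*
+   * Usage sketch mirroring the options exercised above (illustrative only; the
+   * delimiter and quote values here are arbitrary): a tab-separated input quoted
+   * with '\001' could be parsed as
+   *
+   *   ParseOptions po = new ParseOptions();
+   *   po.setOption(ParseOptions.OPTIONS_CSV_DELIMITER, '\t');
+   *   po.setOption(ParseOptions.OPTIONS_CSV_QUOTE, '\001');
+   *   try (CSVParser parser = new CSVParser(reader, po)) {
+   *     Iterator<Row> it = parser.iterator();
+   *     while (it.hasNext()) {
+   *       Row row = it.next();
+   *       // consume row.getRow()
+   *     }
+   *   }
+   */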
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java
new file mode 100644
index 0000000..8006e91
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java
@@ -0,0 +1,326 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.upload;
+
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.client.ColumnDescriptionShort;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.DataParser;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.PreviewData;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+
+public class DataParserCSVTest {
+  @Test
+  public void testParsePreviewCSV() throws Exception {
+    String str = "1,a\n" +
+            "2,b\n" +
+            "3,c\n";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+
+    try (
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions);
+    ){
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(2, pd.getPreviewRows().size()); // the first row is the header, so it is not among the preview rows
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
+              new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
+
+      Object[] cols2 = {"2", "b"};
+      Row row2 = new Row(cols2);
+
+      Object[] cols3 = {"3", "c"};
+      Row row3 = new Row(cols3);
+
+      Row[] rows = {row2, row3};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
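+
+  /*
+   * The preview flow exercised throughout this class, as a sketch (the names match
+   * the production API used above):
+   *
+   *   ParseOptions opts = new ParseOptions();
+   *   opts.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+   *   opts.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+   *   try (DataParser parser = new DataParser(reader, opts)) {
+   *     PreviewData preview = parser.parsePreview();
+   *     // preview.getHeader()      -> detected ColumnDescription list
+   *     // preview.getPreviewRows() -> sample Rows (header excluded for FIRST_RECORD)
+   *   }
+   */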
+
+  /**
+   * If even one preview row does not fit a column's narrower datatype, the column
+   * should be assigned the wider datatype. When the first row is the header, it must
+   * not be accounted for while detecting datatypes.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewDataTypeDetectionCSV() throws Exception {
+    String str = "1,a,10,k\n" +
+      "2,b,6,8\n" +
+      "2.2,b,7,9\n" +
+      "2,b,abc,1\n" +
+      "2,b,9,3\n" +
+      "2,b,8,5\n" +
+      "2,b,7,3\n" +
+      "2,b,6,3\n" +
+      "3,c,c,3\n";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    try(StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions)) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(4, pd.getHeader().size());
+      ColumnDescription[] cd = {
+        // DOUBLE because row 3 contains 2.2
+        new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 0),
+        // CHAR because every value is a single character
+        new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1),
+        // STRING because row 4 contains "abc"
+        new ColumnDescriptionImpl("10", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+        // INT: row 1 contains 'k', but it is the header and is not counted during datatype detection
+        new ColumnDescriptionImpl("k", ColumnDescriptionShort.DataTypes.INT.toString(), 3)};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+    }
+  }
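+
+  // Widening order implied by the two detection tests (inferred from their
+  // assertions, not from the detector's source):
+  //   all values parse as int            -> INT
+  //   ints plus a decimal such as 2.2    -> DOUBLE
+  //   single characters only             -> CHAR
+  //   anything mixed with longer text    -> STRING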
+
+  /**
+   * As above: a single non-conforming preview row widens the column's datatype, and a
+   * header first row is not accounted for while detecting datatypes; here, though,
+   * row 2 is a data row, so its values do count.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewDataTypeDetection2CSV() throws Exception {
+    String str = "1,a,10,k\n" +
+      "2,b,6,p\n" +
+      "2.2,b,7,9\n" +
+      "2,b,2.2,1\n" +
+      "2,b,9,3\n" +
+      "2,b,8,5\n" +
+      "2,b,7,3\n" +
+      "2,b,6,3\n" +
+      "3,c,c,3\n";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+
+    try(StringReader sr = new StringReader(str);
+        DataParser dp = new DataParser(sr, parseOptions)) {
+
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(4, pd.getHeader().size());
+      ColumnDescription[] cd = {
+        // DOUBLE because row 3 contains 2.2
+        new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 0),
+        // CHAR because every value is a single character
+        new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1),
+        // STRING: the values mix int, char and double; nothing narrower than 'string' satisfies all the rows
+        new ColumnDescriptionImpl("10", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+        // CHAR: row 1 contains 'k' but is the header and is skipped; row 2, however,
+        // contains the char 'p', which is accounted for during datatype detection
+        new ColumnDescriptionImpl("k", ColumnDescriptionShort.DataTypes.CHAR.toString(), 3)};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+    }
+  }
+
+  /**
+   * A one-row CSV yields default column names, with that single row appearing in the
+   * preview, when HEADER.PROVIDED_BY_USER is selected.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreview1RowCSV() throws Exception {
+    String str = "1,a\n" ;
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.PROVIDED_BY_USER.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions)
+    ) {
+
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(1, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("column1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
+        new ColumnDescriptionImpl("column2", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
+
+      Object[] cols1 = {"1", "a"};
+      Row row1 = new Row(cols1);
+
+      Row[] rows = {row1};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+
+  /**
+   * A one-row CSV throws an exception during preview when HEADER.FIRST_RECORD is
+   * selected, as the only row is consumed as the header.
+   * @throws IOException
+   */
+  @Test(expected = java.util.NoSuchElementException.class)
+  public void testParsePreview1RowCSVFirstRowHeader() throws Exception {
+    String str = "col1,col2\n" ;
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions)
+    ) {
+
+
+      dp.parsePreview();
+    }
+  }
+
+  /**
+   * More columns in a row => the extra columns are ignored. The number of columns is
+   * decided by the first row; if another row contains more columns, those extra
+   * columns are dropped. Here the first row has 2 columns and the second has 3, so
+   * the value 'x' is ignored.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVMoreColumns() throws Exception {
+    String str = "1,a\n" +
+            "2,b,x\n" +  // contains 3 cols, more number of columns
+            "3,c\n";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+
+    try(
+        StringReader sr = new StringReader(str);
+        DataParser dp = new DataParser(sr, parseOptions)
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+      Row row = new Row(new Object[]{"2","b"});
+
+      Assert.assertArrayEquals("Additional columns not properly handled.", row.getRow(),pd.getPreviewRows().get(0).getRow());
+    }
+  }
+
+  /**
+   * Fewer columns in a row => missing values are treated as null. The number of
+   * columns is decided by the first row; if another row has fewer columns, the
+   * absent trailing values are treated as null.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVLessColumns() throws Exception {
+    String str = "1,a\n" +
+            "2\n" +  // contains 1 col, less number of columns
+            "3,c\n";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions)
+      ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertEquals("Missing value not detected as null.",pd.getPreviewRows().get(1).getRow()[1],null);
+    }
+  }
+
+  /**
+   * Empty values are treated as empty strings.
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyColumn() throws Exception {
+    String str = "1,a,x\n" +
+            "2,,y\n" +  // contains 1 col, less number of columns
+            "3,c,z\n";
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions)
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(0).getRow()[1],"");
+    }
+  }
+
+  /**
+   * Empty values, including trailing ones, are treated as empty strings.
+   * @throws IOException
+   */
+  @Test
+  public void testLastEmptyColumn() throws Exception {
+    String str = "1,a,x\n" +
+            "2,,\n" +  // contains 1 col, less number of columns
+            "3,c,z\n";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions)
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(0).getRow()[1],"");
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(0).getRow()[2],"");
+    }
+  }
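+
+  // Summary of the column-count tests above, derived from their assertions: a column
+  // that is absent from a row surfaces as null, while a column that is present but
+  // empty surfaces as "".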
+}