Posted to commits@phoenix.apache.org by st...@apache.org on 2020/07/07 12:47:52 UTC

[phoenix-connectors] branch master updated: PHOENIX-5965 Remove Guava from Phoenix-Connectors

This is an automated email from the ASF dual-hosted git repository.

stoty pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix-connectors.git


The following commit(s) were added to refs/heads/master by this push:
     new 2d297a8  PHOENIX-5965 Remove Guava from Phoenix-Connectors
2d297a8 is described below

commit 2d297a8061dce00df33ec865a666a1656311881c
Author: Beata Sudi <su...@gmail.com>
AuthorDate: Mon Jun 15 13:46:43 2020 +0200

    PHOENIX-5965 Remove Guava from Phoenix-Connectors
---
 .../apache/phoenix/flume/CsvEventSerializerIT.java |  36 +++--
 .../phoenix/flume/JsonEventSerializerIT.java       |  35 +++--
 .../org/apache/phoenix/flume/PhoenixSinkIT.java    |   2 +-
 .../phoenix/flume/RegexEventSerializerIT.java      |  32 +++--
 .../org/apache/phoenix/flume/SchemaHandler.java    |  17 +--
 .../flume/serializer/BaseEventSerializer.java      | 147 +++++++++++----------
 .../flume/serializer/CsvEventSerializer.java       |  26 +++-
 .../flume/serializer/JsonEventSerializer.java      |  18 ++-
 .../flume/serializer/RegexEventSerializer.java     |  15 ++-
 .../org/apache/phoenix/flume/sink/PhoenixSink.java |  25 ++--
 .../java/org/apache/phoenix/hive/HiveTestUtil.java |  16 +--
 .../org/apache/phoenix/hive/PhoenixMetaHook.java   |  30 +++--
 .../java/org/apache/phoenix/hive/PhoenixRow.java   |   4 +-
 .../java/org/apache/phoenix/hive/PhoenixSerDe.java |   6 +-
 .../phoenix/hive/mapreduce/PhoenixInputFormat.java |  18 ++-
 .../phoenix/hive/mapreduce/PhoenixInputSplit.java  |  17 ++-
 .../hive/mapreduce/PhoenixRecordReader.java        |   7 +-
 .../hive/mapreduce/PhoenixResultWritable.java      |  11 +-
 .../PhoenixListObjectInspector.java                |   4 +-
 .../hive/ql/index/IndexPredicateAnalyzer.java      |   5 +-
 .../phoenix/hive/query/PhoenixQueryBuilder.java    | 101 +++++++-------
 .../phoenix/hive/util/ColumnMappingUtils.java      |  13 +-
 .../hive/util/PhoenixStorageHandlerUtil.java       |  18 +--
 .../org/apache/phoenix/hive/util/PhoenixUtil.java  |  14 +-
 .../hive/query/PhoenixQueryBuilderTest.java        |  47 +++----
 .../apache/phoenix/kafka/PhoenixConsumerIT.java    |   6 +-
 .../phoenix/kafka/consumer/PhoenixConsumer.java    |  44 ++++--
 .../org/apache/phoenix/pig/PhoenixHBaseLoader.java |  11 +-
 .../phoenix/pig/util/PhoenixPigSchemaUtilTest.java |   7 +-
 .../pig/util/QuerySchemaParserFunctionTest.java    |   3 +-
 .../pig/util/TableSchemaParserFunctionTest.java    |   6 +-
 .../java/org/apache/phoenix/spark/OrderByIT.java   |   6 +-
 .../java/org/apache/phoenix/spark/SparkUtil.java   |   3 +-
 .../v2/reader/PhoenixDataSourceReadOptions.java    |   6 +-
 .../v2/reader/PhoenixDataSourceReader.java         |  13 +-
 .../v2/reader/PhoenixInputPartitionReader.java     |  10 +-
 .../datasource/v2/writer/PhoenixDataWriter.java    |   5 +-
 pom.xml                                            |   2 +-
 38 files changed, 441 insertions(+), 345 deletions(-)

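For reference, the recurring substitution across these hunks is Guava's collection factories, Preconditions, and Throwables being replaced with plain JDK constructs. A minimal illustrative sketch of the pattern follows; the class and helper names are made up for illustration and are not part of the patch:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class GuavaFreeSketch {

        // Replaces Lists.newArrayListWithCapacity(n) / Maps.newHashMapWithExpectedSize(n)
        List<String> columns = new ArrayList<>(10);
        Map<String, String> headers = new HashMap<>(2);

        // Replaces Preconditions.checkNotNull(value, message)
        static <T> T checkNotNull(T value, String message) {
            if (value == null) {
                throw new NullPointerException(message);
            }
            return value;
        }

        // Replaces Throwables.propagate(t): rethrow unchecked exceptions, wrap checked ones
        static RuntimeException propagate(Throwable t) {
            if (t instanceof RuntimeException) {
                throw (RuntimeException) t;
            }
            throw new RuntimeException(t);
        }
    }

Note that new HashMap<>(2) sets an initial capacity rather than an expected element count, so it does not reserve quite the same headroom as the Guava helper; for the tiny maps in these tests the difference is immaterial.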
diff --git a/phoenix-flume/src/it/java/org/apache/phoenix/flume/CsvEventSerializerIT.java b/phoenix-flume/src/it/java/org/apache/phoenix/flume/CsvEventSerializerIT.java
index 842db04..1f4fff9 100644
--- a/phoenix-flume/src/it/java/org/apache/phoenix/flume/CsvEventSerializerIT.java
+++ b/phoenix-flume/src/it/java/org/apache/phoenix/flume/CsvEventSerializerIT.java
@@ -26,6 +26,8 @@ import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -46,9 +48,6 @@ import org.apache.phoenix.flume.sink.PhoenixSink;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.junit.Test;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 
 public class CsvEventSerializerIT extends BaseHBaseManagedTimeIT {
 
@@ -220,7 +219,7 @@ public class CsvEventSerializerIT extends BaseHBaseManagedTimeIT {
 		String a1 = "\"aaa,bbb,ccc\"";
 		String a2 = "\"1,2,3,4\"";
 		String eventBody = null;
-		List<Event> eventList = Lists.newArrayListWithCapacity(numEvents);
+		List<Event> eventList = new ArrayList<>(numEvents);
 		for (int i = 0; i < eventList.size(); i++) {
 			eventBody = (col1 + i) + "," + i * 10.5 + "," + a1 + "," + a2;
 			Event event = EventBuilder.withBody(Bytes.toBytes(eventBody));
@@ -277,10 +276,10 @@ public class CsvEventSerializerIT extends BaseHBaseManagedTimeIT {
 		String hostHeader = "host1";
 		String sourceHeader = "source1";
 		String eventBody = null;
-		List<Event> eventList = Lists.newArrayListWithCapacity(numEvents);
+		List<Event> eventList = new ArrayList<>(numEvents);
 		for (int i = 0; i < numEvents; i++) {
 			eventBody = (col1 + i) + "," + i * 10.5 + "," + a1 + "," + a2;
-			Map<String, String> headerMap = Maps.newHashMapWithExpectedSize(2);
+			Map<String, String> headerMap = new HashMap<>(2);
 			headerMap.put("host", hostHeader);
 			headerMap.put("source", sourceHeader);
 			Event event = EventBuilder.withBody(Bytes.toBytes(eventBody), headerMap);
@@ -337,7 +336,9 @@ public class CsvEventSerializerIT extends BaseHBaseManagedTimeIT {
 	private void initSinkContext(final String fullTableName, final String ddl, final String columns,
 			final String csvDelimiter, final String csvQuote, final String csvEscape, final String csvArrayDelimiter,
 			final String rowkeyType, final String headers) {
-		Preconditions.checkNotNull(fullTableName);
+		if (fullTableName == null){
+			throw new NullPointerException();
+		}
 		sinkContext = new Context();
 		sinkContext.put(FlumeConstants.CONFIG_TABLE, fullTableName);
 		sinkContext.put(FlumeConstants.CONFIG_JDBC_URL, getUrl());
@@ -370,14 +371,22 @@ public class CsvEventSerializerIT extends BaseHBaseManagedTimeIT {
 	}
 
 	private void setConfig(final String configName, final String configValue) {
-		Preconditions.checkNotNull(sinkContext);
-		Preconditions.checkNotNull(configName);
-		Preconditions.checkNotNull(configValue);
+		if (sinkContext == null){
+			throw new NullPointerException();
+		}
+		if (configName == null){
+			throw new NullPointerException();
+		}
+		if (configValue == null){
+			throw new NullPointerException();
+		}
 		sinkContext.put(configName, configValue);
 	}
 
 	private int countRows(final String fullTableName) throws SQLException {
-		Preconditions.checkNotNull(fullTableName);
+		if (fullTableName == null){
+			throw new NullPointerException();
+		}
 		Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
 		final Connection conn = DriverManager.getConnection(getUrl(), props);
 		ResultSet rs = null;
@@ -401,7 +410,10 @@ public class CsvEventSerializerIT extends BaseHBaseManagedTimeIT {
 	}
 
 	private void dropTable(final String fullTableName) throws SQLException {
-		Preconditions.checkNotNull(fullTableName);
+		if (fullTableName == null){
+			throw new NullPointerException();
+		}
+
 		Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
 		final Connection conn = DriverManager.getConnection(getUrl(), props);
 		try {
diff --git a/phoenix-flume/src/it/java/org/apache/phoenix/flume/JsonEventSerializerIT.java b/phoenix-flume/src/it/java/org/apache/phoenix/flume/JsonEventSerializerIT.java
index 0210bad..2265350 100644
--- a/phoenix-flume/src/it/java/org/apache/phoenix/flume/JsonEventSerializerIT.java
+++ b/phoenix-flume/src/it/java/org/apache/phoenix/flume/JsonEventSerializerIT.java
@@ -26,6 +26,8 @@ import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -46,9 +48,6 @@ import org.apache.phoenix.flume.sink.PhoenixSink;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.junit.Test;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 
 public class JsonEventSerializerIT extends BaseHBaseManagedTimeIT {
 
@@ -348,7 +347,7 @@ public class JsonEventSerializerIT extends BaseHBaseManagedTimeIT {
 		String a1 = "[aaa,bbb,ccc]";
 		String a2 = "[1,2,3,4]";
 		String eventBody = null;
-		List<Event> eventList = Lists.newArrayListWithCapacity(numEvents);
+		List<Event> eventList =  new ArrayList<>(numEvents);
 		for (int i = 0; i < eventList.size(); i++) {
 			eventBody = "{\"col1\" : \"" + (col1 + i) + "\", \"col2\" : " + i * 10.5 + " , \"col3\" : " + a1
 					+ " , \"col4\" : " + a2 + "}";
@@ -407,11 +406,11 @@ public class JsonEventSerializerIT extends BaseHBaseManagedTimeIT {
 		String hostHeader = "host1";
 		String sourceHeader = "source1";
 		String eventBody = null;
-		List<Event> eventList = Lists.newArrayListWithCapacity(numEvents);
+		List<Event> eventList = new ArrayList<>(numEvents);
 		for (int i = 0; i < numEvents; i++) {
 			eventBody = "{\"col1\" : \"" + (col1 + i) + "\", \"col2\" : " + i * 10.5 + " , \"col3\" : " + a1
 					+ " , \"col4\" : " + a2 + "}";
-			Map<String, String> headerMap = Maps.newHashMapWithExpectedSize(2);
+			Map<String, String> headerMap = new HashMap<>(2);
 			headerMap.put("host", hostHeader);
 			headerMap.put("source", sourceHeader);
 			Event event = EventBuilder.withBody(Bytes.toBytes(eventBody), headerMap);
@@ -467,7 +466,9 @@ public class JsonEventSerializerIT extends BaseHBaseManagedTimeIT {
 
 	private void initSinkContext(final String fullTableName, final String ddl, final String columns,
 			final String columnsMapping, final String rowkeyType, final String headers) {
-		Preconditions.checkNotNull(fullTableName);
+		if (fullTableName == null) {
+			throw new NullPointerException();
+		}
 		sinkContext = new Context();
 		sinkContext.put(FlumeConstants.CONFIG_TABLE, fullTableName);
 		sinkContext.put(FlumeConstants.CONFIG_JDBC_URL, getUrl());
@@ -495,14 +496,22 @@ public class JsonEventSerializerIT extends BaseHBaseManagedTimeIT {
 	}
 
 	private void setConfig(final String configName, final String configValue) {
-		Preconditions.checkNotNull(sinkContext);
-		Preconditions.checkNotNull(configName);
-		Preconditions.checkNotNull(configValue);
+		if (sinkContext == null){
+			throw new NullPointerException();
+		}
+		if (configName == null){
+			throw new NullPointerException();
+		}
+		if (configValue == null){
+			throw new NullPointerException();
+		}
 		sinkContext.put(configName, configValue);
 	}
 
 	private int countRows(final String fullTableName) throws SQLException {
-		Preconditions.checkNotNull(fullTableName);
+		if (fullTableName == null){
+			throw new NullPointerException();
+		}
 		Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
 		final Connection conn = DriverManager.getConnection(getUrl(), props);
 		ResultSet rs = null;
@@ -526,7 +535,9 @@ public class JsonEventSerializerIT extends BaseHBaseManagedTimeIT {
 	}
 
 	private void dropTable(final String fullTableName) throws SQLException {
-		Preconditions.checkNotNull(fullTableName);
+		if (fullTableName == null){
+			throw new NullPointerException();
+		}
 		Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
 		final Connection conn = DriverManager.getConnection(getUrl(), props);
 		try {
diff --git a/phoenix-flume/src/it/java/org/apache/phoenix/flume/PhoenixSinkIT.java b/phoenix-flume/src/it/java/org/apache/phoenix/flume/PhoenixSinkIT.java
index 867d1ad..88e8678 100644
--- a/phoenix-flume/src/it/java/org/apache/phoenix/flume/PhoenixSinkIT.java
+++ b/phoenix-flume/src/it/java/org/apache/phoenix/flume/PhoenixSinkIT.java
@@ -80,7 +80,7 @@ public class PhoenixSinkIT extends BaseHBaseManagedTimeIT {
     
     
     
-    @Test(expected= NullPointerException.class)
+    @Test(expected = NullPointerException.class)
     public void testInvalidConfiguration () {
         
         sinkContext = new Context ();
diff --git a/phoenix-flume/src/it/java/org/apache/phoenix/flume/RegexEventSerializerIT.java b/phoenix-flume/src/it/java/org/apache/phoenix/flume/RegexEventSerializerIT.java
index 9548e65..92c85e1 100644
--- a/phoenix-flume/src/it/java/org/apache/phoenix/flume/RegexEventSerializerIT.java
+++ b/phoenix-flume/src/it/java/org/apache/phoenix/flume/RegexEventSerializerIT.java
@@ -26,6 +26,8 @@ import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -46,10 +48,6 @@ import org.apache.phoenix.flume.sink.PhoenixSink;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.junit.Test;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-
 
 public class RegexEventSerializerIT extends BaseHBaseManagedTimeIT {
 
@@ -175,7 +173,7 @@ public class RegexEventSerializerIT extends BaseHBaseManagedTimeIT {
         String col1 = "val1";
         String col2 = "val2";
         String eventBody = null;
-        List<Event> eventList = Lists.newArrayListWithCapacity(numEvents);
+        List<Event> eventList = new ArrayList<>(numEvents);
         for(int i = 0 ; i < eventList.size() ; i++) {
             eventBody = (col1 + i) + "\t" + (col2 + i);
             Event event = EventBuilder.withBody(Bytes.toBytes(eventBody));
@@ -309,10 +307,10 @@ public class RegexEventSerializerIT extends BaseHBaseManagedTimeIT {
         String hostHeader = "host1";
         String sourceHeader = "source1";
         String eventBody = null;
-        List<Event> eventList = Lists.newArrayListWithCapacity(numEvents);
+        List<Event> eventList = new ArrayList<>(numEvents);
         for(int i = 0 ; i < numEvents ; i++) {
             eventBody = (col1 + i) + "\t" + (col2 + i);
-            Map<String, String> headerMap = Maps.newHashMapWithExpectedSize(2);
+            Map<String, String> headerMap = new HashMap<>(2);
             headerMap.put("host",hostHeader);
             headerMap.put("source",sourceHeader);
             Event event = EventBuilder.withBody(Bytes.toBytes(eventBody),headerMap);
@@ -366,7 +364,9 @@ public class RegexEventSerializerIT extends BaseHBaseManagedTimeIT {
     }
     
     private void initSinkContextWithDefaults(final String fullTableName) {
-        Preconditions.checkNotNull(fullTableName);
+        if (fullTableName == null){
+            throw new NullPointerException();
+        }
         sinkContext = new Context ();
         String ddl = "CREATE TABLE " + fullTableName +
                 "  (flume_time timestamp not null, col1 varchar , col2 varchar" +
@@ -383,14 +383,22 @@ public class RegexEventSerializerIT extends BaseHBaseManagedTimeIT {
       }
     
     private void setConfig(final String configName , final String configValue) {
-        Preconditions.checkNotNull(sinkContext);
-        Preconditions.checkNotNull(configName);
-        Preconditions.checkNotNull(configValue);
+        if (sinkContext == null){
+            throw new NullPointerException();
+        }
+        if (configName == null){
+            throw new NullPointerException();
+        }
+        if (configValue == null){
+            throw new NullPointerException();
+        }
         sinkContext.put(configName, configValue);
     }
     
     private int countRows(final String fullTableName) throws SQLException {
-        Preconditions.checkNotNull(fullTableName);
+        if (fullTableName == null){
+            throw new NullPointerException();
+        }
         Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
         final Connection conn = DriverManager.getConnection(getUrl(), props);
         ResultSet rs = null ;
diff --git a/phoenix-flume/src/main/java/org/apache/phoenix/flume/SchemaHandler.java b/phoenix-flume/src/main/java/org/apache/phoenix/flume/SchemaHandler.java
index 8b14b64..206b3e5 100644
--- a/phoenix-flume/src/main/java/org/apache/phoenix/flume/SchemaHandler.java
+++ b/phoenix-flume/src/main/java/org/apache/phoenix/flume/SchemaHandler.java
@@ -23,25 +23,26 @@ import java.sql.SQLException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Preconditions;
-import com.google.common.base.Throwables;
-
 public class SchemaHandler {
-    
+
     private static final Logger logger = LoggerFactory.getLogger(SchemaHandler.class);
 
     public static boolean createTable(Connection connection, String createTableDdl) {
-        Preconditions.checkNotNull(connection);
-        Preconditions.checkNotNull(createTableDdl); 
+        if (connection == null) {
+            throw new NullPointerException();
+        }
+        if (createTableDdl == null) {
+            throw new NullPointerException();
+        }
         boolean status  = true;
         try {
             status = connection.createStatement().execute(createTableDdl);
         } catch (SQLException e) {
             logger.error("An error occurred during executing the create table ddl {} ",createTableDdl);
-            Throwables.propagate(e);
+            throw new RuntimeException(e);
         }
         return status;
-        
+
     }
 
 }
diff --git a/phoenix-flume/src/main/java/org/apache/phoenix/flume/serializer/BaseEventSerializer.java b/phoenix-flume/src/main/java/org/apache/phoenix/flume/serializer/BaseEventSerializer.java
index 24527e3..01836a9 100644
--- a/phoenix-flume/src/main/java/org/apache/phoenix/flume/serializer/BaseEventSerializer.java
+++ b/phoenix-flume/src/main/java/org/apache/phoenix/flume/serializer/BaseEventSerializer.java
@@ -27,7 +27,9 @@ import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -47,36 +49,30 @@ import org.apache.phoenix.util.StringUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Preconditions;
-import com.google.common.base.Splitter;
-import com.google.common.base.Strings;
-import com.google.common.base.Throwables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 
 public abstract class BaseEventSerializer implements EventSerializer {
 
     private static final Logger logger = LoggerFactory.getLogger(BaseEventSerializer.class);
-    
+
     protected Connection connection;
     protected String fullTableName;
     protected ColumnInfo[] columnMetadata;
     protected boolean autoGenerateKey = false;
     protected KeyGenerator  keyGenerator;
-    protected List<String>  colNames = Lists.newArrayListWithExpectedSize(10);
-    protected List<String>  headers  = Lists.newArrayListWithExpectedSize(5);
+    protected List<String>  colNames = new ArrayList<>(10);
+    protected List<String>  headers  = new ArrayList<>(5);
     protected String upsertStatement;
     private   String jdbcUrl;
     private   Integer batchSize;
     private   String  createTableDdl;
 
 
-    
-    
-    
+
+
+
     @Override
     public void configure(Context context) {
-        
+
         this.createTableDdl = context.getString(FlumeConstants.CONFIG_TABLE_DDL);
         this.fullTableName = context.getString(FlumeConstants.CONFIG_TABLE);
         final String zookeeperQuorum = context.getString(FlumeConstants.CONFIG_ZK_QUORUM);
@@ -85,70 +81,75 @@ public abstract class BaseEventSerializer implements EventSerializer {
         final String columnNames = context.getString(CONFIG_COLUMN_NAMES);
         final String headersStr = context.getString(CONFIG_HEADER_NAMES);
         final String keyGeneratorType = context.getString(CONFIG_ROWKEY_TYPE_GENERATOR);
-       
-        Preconditions.checkNotNull(this.fullTableName,"Table name cannot be empty, please specify in the configuration file");
-        if(!Strings.isNullOrEmpty(zookeeperQuorum)) {
+
+        if (this.fullTableName == null) {
+            throw new NullPointerException(
+                "Table name cannot be empty, please specify in the configuration file");
+        }
+        if(zookeeperQuorum != null && !zookeeperQuorum.isEmpty()) {
             this.jdbcUrl = QueryUtil.getUrl(zookeeperQuorum);
         }
-        if(!Strings.isNullOrEmpty(ipJdbcURL)) {
+        if(ipJdbcURL != null && !ipJdbcURL.isEmpty()) {
             this.jdbcUrl = ipJdbcURL;
         }
-        Preconditions.checkNotNull(this.jdbcUrl,"Please specify either the zookeeper quorum or the jdbc url in the configuration file");
-        Preconditions.checkNotNull(columnNames,"Column names cannot be empty, please specify in configuration file");
-        for(String s : Splitter.on(DEFAULT_COLUMNS_DELIMITER).split(columnNames)) {
-           colNames.add(s);
+        if (this.jdbcUrl == null) {
+            throw new NullPointerException(
+                "Please specify either the zookeeper quorum or the jdbc url in the configuration file");
         }
-        
-         if(!Strings.isNullOrEmpty(headersStr)) {
-            for(String s : Splitter.on(DEFAULT_COLUMNS_DELIMITER).split(headersStr)) {
-                headers.add(s);
-             }
+        if (columnNames == null) {
+            throw new NullPointerException(
+                "Column names cannot be empty, please specify in configuration file");
         }
-      
-        if(!Strings.isNullOrEmpty(keyGeneratorType)) {
+        colNames.addAll(Arrays.asList(columnNames.split(DEFAULT_COLUMNS_DELIMITER)));
+
+        if(headersStr != null && !headersStr.isEmpty()) {
+            headers.addAll(Arrays.asList(headersStr.split(DEFAULT_COLUMNS_DELIMITER)));
+        }
+
+        if(keyGeneratorType != null && !keyGeneratorType.isEmpty()) {
             try {
                 keyGenerator =  DefaultKeyGenerator.valueOf(keyGeneratorType.toUpperCase());
                 this.autoGenerateKey = true;
             } catch(IllegalArgumentException iae) {
                 logger.error("An invalid key generator {} was specified in configuration file. Specify one of {}",keyGeneratorType,DefaultKeyGenerator.values());
-                Throwables.propagate(iae);
-            } 
+                throw new RuntimeException(iae);
+            }
         }
-        
+
         logger.debug(" the jdbcUrl configured is {}",jdbcUrl);
         logger.debug(" columns configured are {}",colNames.toString());
         logger.debug(" headers configured are {}",headersStr);
         logger.debug(" the keyGenerator configured is {} ",keyGeneratorType);
 
         doConfigure(context);
-        
+
     }
-    
+
     @Override
     public void configure(ComponentConfiguration conf) {
         // NO-OP
-        
+
     }
-    
-    
+
+
     @Override
     public void initialize() throws SQLException {
         final Properties props = new Properties();
-        props.setProperty(UPSERT_BATCH_SIZE_ATTRIB, String.valueOf(this.batchSize)); 
+        props.setProperty(UPSERT_BATCH_SIZE_ATTRIB, String.valueOf(this.batchSize));
         ResultSet rs = null;
         try {
             this.connection = DriverManager.getConnection(this.jdbcUrl, props);
             this.connection.setAutoCommit(false);
             if(this.createTableDdl != null) {
-                 SchemaHandler.createTable(connection,createTableDdl);
+                SchemaHandler.createTable(connection,createTableDdl);
             }
-      
-            
-            final Map<String,Integer> qualifiedColumnMap = Maps.newLinkedHashMap();
-            final Map<String,Integer> unqualifiedColumnMap = Maps.newLinkedHashMap();
+
+
+            final Map<String,Integer> qualifiedColumnMap = new LinkedHashMap<>();
+            final Map<String,Integer> unqualifiedColumnMap = new LinkedHashMap<>();
             final String schemaName = SchemaUtil.getSchemaNameFromFullName(fullTableName);
             final String tableName  = SchemaUtil.getTableNameFromFullName(fullTableName);
-            
+
             String rowkey = null;
             String  cq = null;
             String  cf = null;
@@ -160,42 +161,42 @@ public abstract class BaseEventSerializer implements EventSerializer {
                 // TODO: Fix this .. change `DATA_TYPE_POSITION` value 5 to 26
                 // dt = rs.getInt(QueryUtil.DATA_TYPE_POSITION);
                 dt = rs.getInt(26);
-                if(Strings.isNullOrEmpty(cf)) {
+                if(cf == null || cf.isEmpty()) {
                     rowkey = cq; // this is required only when row key is auto generated
                 } else {
                     qualifiedColumnMap.put(SchemaUtil.getColumnDisplayName(cf, cq), dt);
                 }
                 unqualifiedColumnMap.put(SchemaUtil.getColumnDisplayName(null, cq), dt);
-             }
-            
+            }
+
             //can happen when table not found in Hbase.
             if(unqualifiedColumnMap.isEmpty()) {
                 throw new SQLExceptionInfo.Builder(SQLExceptionCode.TABLE_UNDEFINED)
-                        .setTableName(tableName).build().buildException();
+                    .setTableName(tableName).build().buildException();
             }
-       
+
             int colSize = colNames.size();
             int headersSize = headers.size();
             int totalSize = colSize + headersSize + ( autoGenerateKey ? 1 : 0);
             columnMetadata = new ColumnInfo[totalSize] ;
-           
+
             int position = 0;
             position = this.addToColumnMetadataInfo(colNames, qualifiedColumnMap, unqualifiedColumnMap, position);
             position = this.addToColumnMetadataInfo(headers,  qualifiedColumnMap, unqualifiedColumnMap, position);
-           
+
             if(autoGenerateKey) {
                 Integer sqlType = unqualifiedColumnMap.get(rowkey);
                 if (sqlType == null) {
                     throw new SQLExceptionInfo.Builder(SQLExceptionCode.PRIMARY_KEY_MISSING)
-                         .setColumnName(rowkey).setTableName(fullTableName).build().buildException();
+                        .setColumnName(rowkey).setTableName(fullTableName).build().buildException();
                 }
                 columnMetadata[position] = new ColumnInfo(rowkey, sqlType);
                 position++;
             }
-            
+
             this.upsertStatement = QueryUtil.constructUpsertStatement(fullTableName, Arrays.asList(columnMetadata));
             logger.info(" the upsert statement is {} " ,this.upsertStatement);
-            
+
         }  catch (SQLException e) {
             logger.error("error {} occurred during initializing connection ",e.getMessage());
             throw e;
@@ -206,40 +207,46 @@ public abstract class BaseEventSerializer implements EventSerializer {
         }
         doInitialize();
     }
-    
+
     private int addToColumnMetadataInfo(final List<String> columns , final Map<String,Integer> qualifiedColumnsInfoMap, Map<String, Integer> unqualifiedColumnsInfoMap, int position) throws SQLException {
-        Preconditions.checkNotNull(columns);
-        Preconditions.checkNotNull(qualifiedColumnsInfoMap);
-        Preconditions.checkNotNull(unqualifiedColumnsInfoMap);
-       for (int i = 0 ; i < columns.size() ; i++) {
+        if (columns == null) {
+            throw new NullPointerException();
+        }
+        if (qualifiedColumnsInfoMap == null) {
+            throw new NullPointerException();
+        }
+        if (unqualifiedColumnsInfoMap == null) {
+            throw new NullPointerException();
+        }
+        for (int i = 0 ; i < columns.size() ; i++) {
             String columnName = SchemaUtil.normalizeIdentifier(columns.get(i).trim());
             Integer sqlType = unqualifiedColumnsInfoMap.get(columnName);
             if (sqlType == null) {
                 sqlType = qualifiedColumnsInfoMap.get(columnName);
                 if (sqlType == null) {
-                   throw new SQLExceptionInfo.Builder(SQLExceptionCode.COLUMN_NOT_FOUND)
+                    throw new SQLExceptionInfo.Builder(SQLExceptionCode.COLUMN_NOT_FOUND)
                         .setColumnName(columnName).setTableName(this.fullTableName).build().buildException();
                 }
             }
             columnMetadata[position] = new ColumnInfo(columnName, sqlType);
             position++;
-       }
-       return position;
+        }
+        return position;
     }
-    
+
     public abstract void doConfigure(Context context);
-    
+
     public abstract void doInitialize() throws SQLException;
-    
-    
+
+
     @Override
     public void close() {
         if(connection != null) {
             try {
-               connection.close();
-         } catch (SQLException e) {
-            logger.error(" Error while closing connection {} ");
-         }
-       }
+                connection.close();
+            } catch (SQLException e) {
+                logger.error(" Error while closing connection {} ");
+            }
+        }
     }
 }
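The hunks above also drop Strings.isNullOrEmpty and Splitter in favor of plain String operations. A small sketch of that pattern, with illustrative names not taken from the patch:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class SplitSketch {
        // Replaces Strings.isNullOrEmpty(s)
        static boolean isNullOrEmpty(String s) {
            return s == null || s.isEmpty();
        }

        // Replaces Splitter.on(delimiter).split(columnNames) for a plain-string delimiter
        static List<String> splitColumns(String columnNames, String delimiter) {
            List<String> out = new ArrayList<>();
            if (!isNullOrEmpty(columnNames)) {
                out.addAll(Arrays.asList(columnNames.split(delimiter)));
            }
            return out;
        }
    }

String.split treats its argument as a regular expression, so this is equivalent only while the delimiter (a comma here) contains no regex metacharacters.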
diff --git a/phoenix-flume/src/main/java/org/apache/phoenix/flume/serializer/CsvEventSerializer.java b/phoenix-flume/src/main/java/org/apache/phoenix/flume/serializer/CsvEventSerializer.java
index a856c3e..53d08f7 100644
--- a/phoenix-flume/src/main/java/org/apache/phoenix/flume/serializer/CsvEventSerializer.java
+++ b/phoenix-flume/src/main/java/org/apache/phoenix/flume/serializer/CsvEventSerializer.java
@@ -37,6 +37,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import com.google.common.collect.Iterables;
 import org.apache.commons.csv.CSVFormat;
 import org.apache.commons.csv.CSVParser;
 import org.apache.commons.csv.CSVRecord;
@@ -48,8 +49,7 @@ import org.json.JSONTokener;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Iterables;
+
 
 public class CsvEventSerializer extends BaseEventSerializer {
 
@@ -84,9 +84,15 @@ public class CsvEventSerializer extends BaseEventSerializer {
 
 	@Override
 	public void upsertEvents(List<Event> events) throws SQLException {
-		Preconditions.checkNotNull(events);
-		Preconditions.checkNotNull(connection);
-		Preconditions.checkNotNull(this.upsertStatement);
+		if(events == null){
+			throw new NullPointerException();
+		}
+		if(connection == null){
+			throw new NullPointerException();
+		}
+		if(this.upsertStatement == null){
+			throw new NullPointerException();
+		}
 
 		boolean wasAutoCommit = connection.getAutoCommit();
 		connection.setAutoCommit(false);
@@ -189,7 +195,15 @@ public class CsvEventSerializer extends BaseEventSerializer {
 
 		public CSVRecord parse(String input) throws IOException {
 			CSVParser csvParser = new CSVParser(new StringReader(input), this.csvFormat);
-			return Iterables.getFirst(csvParser, null);
+			CSVRecord record;
+			try{
+				record = csvParser.iterator().next();
+			} catch (Exception e) {
+				record = null;
+			}
+
+			return record;
+
 		}
 	}
 
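The new parse() above mimics Iterables.getFirst(csvParser, null) by catching any exception from the iterator and returning null. A hedged alternative sketch that checks hasNext() instead and also closes the parser (helper name is illustrative):

    import java.io.IOException;
    import java.io.StringReader;
    import java.util.Iterator;

    import org.apache.commons.csv.CSVFormat;
    import org.apache.commons.csv.CSVParser;
    import org.apache.commons.csv.CSVRecord;

    public class FirstRecordSketch {
        // Return the first CSV record of the input, or null if there is none
        static CSVRecord firstRecord(String input, CSVFormat format) throws IOException {
            try (CSVParser parser = new CSVParser(new StringReader(input), format)) {
                Iterator<CSVRecord> it = parser.iterator();
                return it.hasNext() ? it.next() : null;
            }
        }
    }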
diff --git a/phoenix-flume/src/main/java/org/apache/phoenix/flume/serializer/JsonEventSerializer.java b/phoenix-flume/src/main/java/org/apache/phoenix/flume/serializer/JsonEventSerializer.java
index 9226017..24ebea8 100644
--- a/phoenix-flume/src/main/java/org/apache/phoenix/flume/serializer/JsonEventSerializer.java
+++ b/phoenix-flume/src/main/java/org/apache/phoenix/flume/serializer/JsonEventSerializer.java
@@ -42,7 +42,6 @@ import org.json.JSONTokener;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Preconditions;
 import com.jayway.jsonpath.Configuration;
 import com.jayway.jsonpath.JsonPath;
 import com.jayway.jsonpath.spi.json.JsonOrgJsonProvider;
@@ -94,11 +93,18 @@ public class JsonEventSerializer extends BaseEventSerializer {
 
 	@Override
 	public void upsertEvents(List<Event> events) throws SQLException {
-		Preconditions.checkNotNull(events);
-		Preconditions.checkNotNull(connection);
-		Preconditions.checkNotNull(this.upsertStatement);
-		Preconditions.checkArgument(isProperMapping, "Please verify fields mapping is not properly done..");
-
+		if (events == null){
+			throw new NullPointerException();
+		}
+		if (connection == null){
+			throw new NullPointerException();
+		}
+		if (this.upsertStatement == null){
+			throw new NullPointerException();
+		}
+		if (!isProperMapping) {
+			throw new IllegalArgumentException("Please verify fields mapping is not properly done..");
+		}
 		boolean wasAutoCommit = connection.getAutoCommit();
 		connection.setAutoCommit(false);
 		try (PreparedStatement colUpsert = connection.prepareStatement(upsertStatement)) {
diff --git a/phoenix-flume/src/main/java/org/apache/phoenix/flume/serializer/RegexEventSerializer.java b/phoenix-flume/src/main/java/org/apache/phoenix/flume/serializer/RegexEventSerializer.java
index b636481..bcf7bed 100644
--- a/phoenix-flume/src/main/java/org/apache/phoenix/flume/serializer/RegexEventSerializer.java
+++ b/phoenix-flume/src/main/java/org/apache/phoenix/flume/serializer/RegexEventSerializer.java
@@ -35,7 +35,6 @@ import org.apache.phoenix.schema.types.PDataType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Preconditions;
 
 public class RegexEventSerializer extends BaseEventSerializer {
 
@@ -65,10 +64,16 @@ public class RegexEventSerializer extends BaseEventSerializer {
    
     @Override
     public void upsertEvents(List<Event> events) throws SQLException {
-       Preconditions.checkNotNull(events);
-       Preconditions.checkNotNull(connection);
-       Preconditions.checkNotNull(this.upsertStatement);
-       
+        if (events == null){
+            throw new NullPointerException();
+        }
+        if (connection == null){
+            throw new NullPointerException();
+        }
+        if (this.upsertStatement == null){
+            throw new NullPointerException();
+        }
+
        boolean wasAutoCommit = connection.getAutoCommit();
        connection.setAutoCommit(false);
        try (PreparedStatement colUpsert = connection.prepareStatement(upsertStatement)) {
diff --git a/phoenix-flume/src/main/java/org/apache/phoenix/flume/sink/PhoenixSink.java b/phoenix-flume/src/main/java/org/apache/phoenix/flume/sink/PhoenixSink.java
index 2b102a2..656ff43 100644
--- a/phoenix-flume/src/main/java/org/apache/phoenix/flume/sink/PhoenixSink.java
+++ b/phoenix-flume/src/main/java/org/apache/phoenix/flume/sink/PhoenixSink.java
@@ -18,6 +18,7 @@
 package org.apache.phoenix.flume.sink;
 
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -37,10 +38,6 @@ import org.apache.phoenix.flume.serializer.EventSerializers;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Joiner;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Throwables;
-import com.google.common.collect.Lists;
 
 public class PhoenixSink extends AbstractSink implements Configurable {
     private static final Logger logger = LoggerFactory.getLogger(PhoenixSink.class);
@@ -60,7 +57,9 @@ public class PhoenixSink extends AbstractSink implements Configurable {
         this.batchSize = context.getInteger(FlumeConstants.CONFIG_BATCHSIZE, FlumeConstants.DEFAULT_BATCH_SIZE);
         final String eventSerializerType = context.getString(FlumeConstants.CONFIG_SERIALIZER);
         
-        Preconditions.checkNotNull(eventSerializerType,"Event serializer cannot be empty, please specify in the configuration file");
+        if (eventSerializerType == null) {
+          throw new NullPointerException("Event serializer cannot be empty, please specify in the configuration file");
+        }
         initializeSerializer(context,eventSerializerType);
         this.sinkCounter = new SinkCounter(this.getName());
     }
@@ -98,7 +97,12 @@ public class PhoenixSink extends AbstractSink implements Configurable {
          
        } catch (Exception e) {
          logger.error("Could not instantiate event serializer." , e);
-         Throwables.propagate(e);
+         if (e instanceof RuntimeException){
+           throw (RuntimeException) e;
+         }
+         else {
+           throw new RuntimeException(e);
+         }
        }
     }
 
@@ -121,7 +125,12 @@ public class PhoenixSink extends AbstractSink implements Configurable {
         } catch(Exception ex) {
             sinkCounter.incrementConnectionFailedCount();
             logger.error("Error {} in initializing the serializer.",ex.getMessage());
-            Throwables.propagate(ex);
+          if (ex instanceof RuntimeException){
+            throw (RuntimeException) ex;
+          }
+          else {
+            throw new RuntimeException(ex);
+          }
        }
        super.start();
     }
@@ -144,7 +153,7 @@ public class PhoenixSink extends AbstractSink implements Configurable {
         Status status = Status.READY;
         Channel channel = getChannel();
         Transaction transaction = null;
-        List<Event>  events = Lists.newArrayListWithExpectedSize(this.batchSize); 
+        List<Event>  events = new ArrayList<>(this.batchSize);
         long startTime = System.nanoTime();
         try {
             transaction = channel.getTransaction();
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
index b4c4e46..6f646ac 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
@@ -17,7 +17,6 @@
  */
 package org.apache.phoenix.hive;
 
-import com.google.common.collect.ImmutableList;
 import junit.framework.Assert;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
@@ -72,16 +71,7 @@ import java.io.OutputStreamWriter;
 import java.io.PrintStream;
 import java.io.StringWriter;
 import java.net.URL;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Deque;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.UUID;
+import java.util.*;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -1160,7 +1150,7 @@ public class HiveTestUtil {
     }
 
     private static String[] cachedQvFileList = null;
-    private static ImmutableList<String> cachedDefaultQvFileList = null;
+    private static List<String> cachedDefaultQvFileList = null;
     private static Pattern qvSuffix = Pattern.compile("_[0-9]+.qv$", Pattern.CASE_INSENSITIVE);
 
     public static List<String> getVersionFiles(String queryDir, String tname) {
@@ -1186,7 +1176,7 @@ public class HiveTestUtil {
         Arrays.sort(cachedQvFileList, String.CASE_INSENSITIVE_ORDER);
         List<String> defaults = getVersionFilesInternal("default");
         cachedDefaultQvFileList = (defaults != null)
-                ? ImmutableList.copyOf(defaults) : ImmutableList.<String>of();
+                ? Collections.unmodifiableList(new ArrayList<String>(defaults)) : Collections.unmodifiableList(new ArrayList<String>());
     }
 
     private static List<String> getVersionFilesInternal(String tname) {
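The ImmutableList usage above becomes an unmodifiable wrapper around a fresh ArrayList. A brief sketch of that substitution, with an illustrative helper name:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    public class UnmodifiableSketch {
        // Replaces ImmutableList.copyOf(defaults) / ImmutableList.<String>of()
        static List<String> defensiveCopy(List<String> defaults) {
            return (defaults != null)
                    ? Collections.unmodifiableList(new ArrayList<>(defaults))
                    : Collections.<String>emptyList();
        }
    }

Unlike ImmutableList, Collections.unmodifiableList is only a read-only view, so copying into a new ArrayList first (as the patch does) is what keeps the result stable if the source list changes later.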
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
index c35634a..bac2449 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
@@ -17,9 +17,7 @@
  */
 package org.apache.phoenix.hive;
 
-import com.google.common.base.CharMatcher;
-import com.google.common.base.Splitter;
-import com.google.common.collect.Lists;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
@@ -34,10 +32,7 @@ import org.apache.phoenix.hive.util.PhoenixUtil;
 
 import java.sql.Connection;
 import java.sql.SQLException;
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 import static org.apache.phoenix.hive.util.ColumnMappingUtils.getColumnMappingMap;
 
@@ -52,7 +47,7 @@ public class PhoenixMetaHook implements HiveMetaHook {
     @Override
     public void preCreateTable(Table table) throws MetaException {
         if (LOG.isDebugEnabled()) {
-            LOG.debug("Precreate  table : " + table.getTableName());
+            LOG.debug("Precreate table : " + table.getTableName());
         }
 
         try (Connection conn = PhoenixConnectionUtil.getConnection(table)) {
@@ -94,8 +89,10 @@ public class PhoenixMetaHook implements HiveMetaHook {
         String phoenixRowKeys = tableParameterMap.get(PhoenixStorageHandlerConstants
                 .PHOENIX_ROWKEYS);
         StringBuilder realRowKeys = new StringBuilder();
-        List<String> phoenixRowKeyList = Lists.newArrayList(Splitter.on
-                (PhoenixStorageHandlerConstants.COMMA).trimResults().split(phoenixRowKeys));
+        List<String> phoenixRowKeyList = new ArrayList<>();
+        for (String key:phoenixRowKeys.split(PhoenixStorageHandlerConstants.COMMA)) {
+            phoenixRowKeyList.add(key.trim());
+        }
         Map<String, String> columnMappingMap = getColumnMappingMap(tableParameterMap.get
                 (PhoenixStorageHandlerConstants.PHOENIX_COLUMN_MAPPING));
 
@@ -116,8 +113,11 @@ public class PhoenixMetaHook implements HiveMetaHook {
                 if ("binary".equals(columnType)) {
                     // Phoenix must define max length of binary when type definition. Obtaining
                     // information from the column mapping. ex) phoenix.rowkeys = "r1, r2(100), ..."
-                    List<String> tokenList = Lists.newArrayList(Splitter.on(CharMatcher.is('(')
-                            .or(CharMatcher.is(')'))).trimResults().split(rowKeyName));
+                    List<String> tokenList =
+                       new ArrayList<>();
+                    for (String name: rowKeyName.split("\\(|\\)")) {
+                        tokenList.add(name.trim());
+                    }
                     columnType = columnType + "(" + tokenList.get(1) + ")";
                     rowKeyName = tokenList.get(0);
                 }
@@ -137,8 +137,10 @@ public class PhoenixMetaHook implements HiveMetaHook {
                 if ("binary".equals(columnType)) {
                     // Phoenix must define max length of binary when type definition. Obtaining
                     // information from the column mapping. ex) phoenix.column.mapping=c1:c1(100)
-                    List<String> tokenList = Lists.newArrayList(Splitter.on(CharMatcher.is('(')
-                            .or(CharMatcher.is(')'))).trimResults().split(columnName));
+                    List<String> tokenList = new ArrayList<>();
+                    for(String name: columnName.split("\\(|\\)")){
+                        tokenList.add(name.trim());
+                    }
                     columnType = columnType + "(" + tokenList.get(1) + ")";
                     columnName = tokenList.get(0);
                 }
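The CharMatcher-based Splitter above is rewritten as a regex split plus trim. A minimal sketch of the same idea, with an illustrative method name:

    import java.util.ArrayList;
    import java.util.List;

    public class RowKeySplitSketch {
        // Replaces Splitter.on(CharMatcher.is('(').or(CharMatcher.is(')'))).trimResults()
        // e.g. "r2(100)" -> ["r2", "100"]
        static List<String> splitOnParens(String rowKeyName) {
            List<String> tokens = new ArrayList<>();
            for (String token : rowKeyName.split("\\(|\\)")) {
                tokens.add(token.trim());
            }
            return tokens;
        }
    }

One small behavioral difference: String.split drops trailing empty tokens while Guava's Splitter keeps them, which does not matter here since only the first two tokens are read.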
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixRow.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixRow.java
index fa307ce..cae8f6c 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixRow.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixRow.java
@@ -17,9 +17,9 @@
  */
 package org.apache.phoenix.hive;
 
-import com.google.common.collect.Lists;
 import org.apache.hadoop.hive.serde2.StructObject;
 
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
@@ -53,7 +53,7 @@ public class PhoenixRow implements StructObject {
      */
     @Override
     public List<Object> getFieldsAsList() {
-        return Lists.newArrayList(resultRowMap.values());
+        return new ArrayList<>(resultRowMap.values());
     }
 
 
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixSerDe.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixSerDe.java
index 9ef0158..9b5083d 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixSerDe.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixSerDe.java
@@ -17,8 +17,6 @@
  */
 package org.apache.phoenix.hive;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Lists;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -38,6 +36,7 @@ import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
 import org.apache.phoenix.hive.mapreduce.PhoenixResultWritable;
 import org.apache.phoenix.hive.objectinspector.PhoenixObjectInspectorFactory;
 
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
@@ -137,8 +136,7 @@ public class PhoenixSerDe extends AbstractSerDe {
         List<TypeInfo> columnTypeList = TypeInfoUtils.getTypeInfosFromTypeString(tbl.getProperty
                 (serdeConstants.LIST_COLUMN_TYPES));
 
-        List<ObjectInspector> columnObjectInspectors = Lists.newArrayListWithExpectedSize
-                (columnTypeList.size());
+        List<ObjectInspector> columnObjectInspectors = new ArrayList<>(columnTypeList.size());
 
         for (TypeInfo typeInfo : columnTypeList) {
             columnObjectInspectors.add(PhoenixObjectInspectorFactory.createObjectInspector
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
index 6e8e8ec..7584e0d 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
@@ -17,11 +17,10 @@
  */
 package org.apache.phoenix.hive.mapreduce;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 import java.io.IOException;
 import java.sql.Connection;
 import java.sql.Statement;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
@@ -123,9 +122,12 @@ public class PhoenixInputFormat<T extends DBWritable> implements InputFormat<Wri
     private List<InputSplit> generateSplits(final JobConf jobConf, final QueryPlan qplan,
                                             final List<KeyRange> splits, String query) throws
             IOException {
-        Preconditions.checkNotNull(qplan);
-        Preconditions.checkNotNull(splits);
-        final List<InputSplit> psplits = Lists.newArrayListWithExpectedSize(splits.size());
+        if (qplan == null){
+            throw new NullPointerException();
+        }if (splits == null){
+            throw new NullPointerException();
+        }
+        final List<InputSplit> psplits = new ArrayList<>(splits.size());
 
         Path[] tablePaths = FileInputFormat.getInputPaths(ShimLoader.getHadoopShims()
                 .newJobContext(new Job(jobConf)));
@@ -159,7 +161,7 @@ public class PhoenixInputFormat<T extends DBWritable> implements InputFormat<Wri
                                 .getBatch() + "] and  regionLocation : " + regionLocation);
                     }
 
-                    inputSplit = new PhoenixInputSplit(Lists.newArrayList(aScan), tablePaths[0],
+                    inputSplit = new PhoenixInputSplit(new ArrayList<>(Arrays.asList(aScan)), tablePaths[0],
                             regionLocation, regionSize);
                     inputSplit.setQuery(query);
                     psplits.add(inputSplit);
@@ -234,7 +236,9 @@ public class PhoenixInputFormat<T extends DBWritable> implements InputFormat<Wri
             }
             final Connection connection = PhoenixConnectionUtil.getInputConnection(configuration,
                     overridingProps);
-            Preconditions.checkNotNull(selectStatement);
+            if (selectStatement == null) {
+                throw new NullPointerException();
+            }
             final Statement statement = connection.createStatement();
             final PhoenixStatement pstmt = statement.unwrap(PhoenixStatement.class);
 
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputSplit.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputSplit.java
index d76e863..71abbdb 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputSplit.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputSplit.java
@@ -17,8 +17,6 @@
  */
 package org.apache.phoenix.hive.mapreduce;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -31,6 +29,7 @@ import org.apache.phoenix.query.KeyRange;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 
 /**
@@ -55,8 +54,12 @@ public class PhoenixInputSplit extends FileSplit implements InputSplit {
 
         regionSize = length;
 
-        Preconditions.checkNotNull(scans);
-        Preconditions.checkState(!scans.isEmpty());
+        if(scans == null) {
+            throw new NullPointerException();
+        }
+        if (scans.isEmpty()) {
+            throw new IllegalStateException();
+        }
         this.scans = scans;
         init();
     }
@@ -86,7 +89,9 @@ public class PhoenixInputSplit extends FileSplit implements InputSplit {
     public void write(DataOutput out) throws IOException {
         super.write(out);
 
-        Preconditions.checkNotNull(scans);
+        if (scans == null) {
+            throw new NullPointerException();
+        }
         WritableUtils.writeVInt(out, scans.size());
         for (Scan scan : scans) {
             ClientProtos.Scan protoScan = ProtobufUtil.toScan(scan);
@@ -104,7 +109,7 @@ public class PhoenixInputSplit extends FileSplit implements InputSplit {
         super.readFields(in);
 
         int count = WritableUtils.readVInt(in);
-        scans = Lists.newArrayListWithExpectedSize(count);
+        scans = new ArrayList<>(count);
         for (int i = 0; i < count; i++) {
             byte[] protoScanBytes = new byte[WritableUtils.readVInt(in)];
             in.readFully(protoScanBytes);
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java
index 2ff8aca..734f021 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java
@@ -19,6 +19,7 @@ package org.apache.phoenix.hive.mapreduce;
 
 import java.io.IOException;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
@@ -49,8 +50,6 @@ import org.apache.phoenix.jdbc.PhoenixResultSet;
 import org.apache.phoenix.monitoring.ReadMetricQueue;
 import org.apache.phoenix.monitoring.ScanMetricsHolder;
 
-import com.google.common.base.Throwables;
-import com.google.common.collect.Lists;
 
 /**
  * @RecordReader implementation that iterates over the the records.
@@ -105,7 +104,7 @@ public class PhoenixRecordReader<T extends DBWritable> implements
         }
 
         try {
-            List<PeekingResultIterator> iterators = Lists.newArrayListWithExpectedSize(scans.size
+            List<PeekingResultIterator> iterators = new ArrayList<>(scans.size
                     ());
             StatementContext ctx = queryPlan.getContext();
             ReadMetricQueue readMetrics = ctx.getReadMetricsQueue();
@@ -140,7 +139,7 @@ public class PhoenixRecordReader<T extends DBWritable> implements
         } catch (SQLException e) {
             LOG.error(String.format(" Error [%s] initializing PhoenixRecordReader. ", e
                     .getMessage()));
-            Throwables.propagate(e);
+            throw new RuntimeException(e);
         }
     }
 
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixResultWritable.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixResultWritable.java
index 2bdc7b2..f322767 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixResultWritable.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixResultWritable.java
@@ -17,8 +17,6 @@
  */
 package org.apache.phoenix.hive.mapreduce;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configurable;
@@ -39,6 +37,9 @@ import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -52,14 +53,14 @@ public class PhoenixResultWritable implements Writable, DBWritable, Configurable
 
     private List<ColumnInfo> columnMetadataList;
     private List<Object> valueList;    // for output
-    private Map<String, Object> rowMap = Maps.newHashMap();  // for input
+    private Map<String, Object> rowMap = new HashMap<>();  // for input
     private Map<String, String> columnMap;
 
     private int columnCount = -1;
 
     private Configuration config;
     private boolean isTransactional;
-    private Map<String, Object> rowKeyMap = Maps.newLinkedHashMap();
+    private Map<String, Object> rowKeyMap = new LinkedHashMap();
     private List<String> primaryKeyColumnList;
 
     public PhoenixResultWritable() {
@@ -73,7 +74,7 @@ public class PhoenixResultWritable implements Writable, DBWritable, Configurable
             throws IOException {
         this(config);
         this.columnMetadataList = columnMetadataList;
-        valueList = Lists.newArrayListWithExpectedSize(columnMetadataList.size());
+        valueList = new ArrayList<>(columnMetadataList.size());
     }
 
     @Override
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixListObjectInspector.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixListObjectInspector.java
index c4f2d51..07cee37 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixListObjectInspector.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixListObjectInspector.java
@@ -17,12 +17,12 @@
  */
 package org.apache.phoenix.hive.objectinspector;
 
-import com.google.common.collect.Lists;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.phoenix.schema.types.PhoenixArray;
 
+import java.util.ArrayList;
 import java.util.List;
 
 /**
@@ -86,7 +86,7 @@ public class PhoenixListObjectInspector implements ListObjectInspector {
 
         PhoenixArray array = (PhoenixArray) data;
         int valueLength = array.getDimensions();
-        List<Object> valueList = Lists.newArrayListWithExpectedSize(valueLength);
+        List<Object> valueList = new ArrayList<>(valueLength);
 
         for (int i = 0; i < valueLength; i++) {
             valueList.add(array.getElement(i));
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/ql/index/IndexPredicateAnalyzer.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/ql/index/IndexPredicateAnalyzer.java
index 659983a..8ffb5fa 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/ql/index/IndexPredicateAnalyzer.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/ql/index/IndexPredicateAnalyzer.java
@@ -17,7 +17,6 @@
  */
 package org.apache.phoenix.hive.ql.index;
 
-import com.google.common.collect.Lists;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -244,7 +243,7 @@ public class IndexPredicateAnalyzer {
         String[] fields = null;
 
         if (LOG.isTraceEnabled()) {
-            LOG.trace("Processing In Operator. nodeOutputs : " + Lists.newArrayList(nodeOutputs));
+            LOG.trace("Processing In Operator. nodeOutputs : " + new ArrayList<>(Arrays.asList(nodeOutputs)));
         }
 
         if (nodeOutputs[0] instanceof ExprNodeFieldDesc) {
@@ -261,7 +260,7 @@ public class IndexPredicateAnalyzer {
 
             if (LOG.isTraceEnabled()) {
                 LOG.trace("nodeOutputs[0] : " + nodeOutputs[0] + ", nodeOutputs[1] : " +
-                        nodeOutputs[1] + " => " + Lists.newArrayList(extracted));
+                        nodeOutputs[1] + " => " + new ArrayList<>(Arrays.asList(extracted)));
             }
 
             columnDesc = (ExprNodeColumnDesc) extracted[0];
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/query/PhoenixQueryBuilder.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/query/PhoenixQueryBuilder.java
index 0cd9d0a..0ab78ba 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/query/PhoenixQueryBuilder.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/query/PhoenixQueryBuilder.java
@@ -17,18 +17,15 @@
  */
 package org.apache.phoenix.hive.query;
 
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
-import com.google.common.base.Predicate;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import javax.annotation.Nullable;
+import java.util.stream.Collectors;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
@@ -44,7 +41,7 @@ import org.apache.phoenix.util.StringUtil;
 import static org.apache.phoenix.hive.util.ColumnMappingUtils.getColumnMappingMap;
 
 /**
- * Query builder. Produces a query depending on the colummn list and conditions
+ * Query builder. Produces a query depending on the column list and conditions
  */
 
 public class PhoenixQueryBuilder {
@@ -96,7 +93,7 @@ public class PhoenixQueryBuilder {
         Map<String, String> columnMappingMap = getColumnMappingMap(jobConf.get
                 (PhoenixStorageHandlerConstants.PHOENIX_COLUMN_MAPPING));
         if(columnMappingMap != null) {
-          List<String> newList = Lists.newArrayList();
+          List<String> newList = new ArrayList<>();
             for(String column:columnList) {
                 if(columnMappingMap.containsKey(column)) {
                     newList.add(columnMappingMap.get(column));
@@ -137,8 +134,8 @@ public class PhoenixQueryBuilder {
 
     private String getSelectColumns(JobConf jobConf, String tableName, List<String>
             readColumnList) throws IOException {
-        String selectColumns = Joiner.on(PhoenixStorageHandlerConstants.COMMA).join(ColumnMappingUtils.quoteColumns(readColumnList));
-
+        String selectColumns = String.join(PhoenixStorageHandlerConstants.COMMA,
+            ColumnMappingUtils.quoteColumns(readColumnList));
         if (PhoenixStorageHandlerConstants.EMPTY_STRING.equals(selectColumns)) {
             selectColumns = "*";
         } else {
@@ -168,7 +165,7 @@ public class PhoenixQueryBuilder {
                     tableName + " search conditions : " + searchConditions + "  hints : " + hints);
         }
 
-        return makeQueryString(jobConf, tableName, Lists.newArrayList(readColumnList),
+        return makeQueryString(jobConf, tableName, new ArrayList<>(readColumnList),
                 searchConditions, QUERY_TEMPLATE, hints);
     }
 
@@ -196,7 +193,7 @@ public class PhoenixQueryBuilder {
             return Collections.emptyList();
         }
 
-        List<String> columns = Lists.newArrayList();
+        List<String> columns = new ArrayList<>();
         sql.append(" where ");
 
         Iterator<IndexSearchCondition> iter = conditions.iterator();
@@ -225,18 +222,28 @@ public class PhoenixQueryBuilder {
     }
 
     private Expression findExpression(final IndexSearchCondition condition) {
-        return Iterables.tryFind(Arrays.asList(Expression.values()), new Predicate<Expression>() {
-            @Override
-            public boolean apply(@Nullable Expression expr) {
-                return expr.isFor(condition);
+        for (Expression exp : Expression.values()) {
+            if (exp.isFor(condition)) {
+                return exp;
             }
-        }).orNull();
+        }
+        return null;
     }
 
-    private static final Joiner JOINER_COMMA = Joiner.on(", ");
-    private static final Joiner JOINER_AND = Joiner.on(" and ");
-    private static final Joiner JOINER_SPACE = Joiner.on(" ");
+    private static final StrJoiner JOINER_COMMA = new StrJoiner(", ");
+    private static final StrJoiner JOINER_AND = new StrJoiner(" and ");
+    private static final StrJoiner JOINER_SPACE = new StrJoiner(" ");
 
+    private static class StrJoiner {
+        private final String delimiter;
+
+        StrJoiner(String delimiter) {
+            this.delimiter = delimiter;
+        }
+        public String join(List<String> list) {
+            return String.join(this.delimiter, list);
+        }
+    }
     private enum Expression {
         EQUAL("UDFOPEqual", "="),
         GREATER_THAN_OR_EQUAL_TO("UDFOPEqualOrGreaterThan", ">="),
@@ -244,12 +251,12 @@ public class PhoenixQueryBuilder {
         LESS_THAN_OR_EQUAL_TO("UDFOPEqualOrLessThan", "<="),
         LESS_THAN("UDFOPLessThan", "<"),
         NOT_EQUAL("UDFOPNotEqual", "!="),
-        BETWEEN("GenericUDFBetween", "between", JOINER_AND, true) {
+        BETWEEN("GenericUDFBetween", "between", JOINER_AND,true) {
             public boolean checkCondition(IndexSearchCondition condition) {
                 return condition.getConstantDescs() != null;
             }
         },
-        IN("GenericUDFIn", "in", JOINER_COMMA, true) {
+        IN("GenericUDFIn", "in", JOINER_COMMA,true) {
             public boolean checkCondition(IndexSearchCondition condition) {
                 return condition.getConstantDescs() != null;
             }
@@ -271,18 +278,18 @@ public class PhoenixQueryBuilder {
 
         private final String hiveCompOp;
         private final String sqlCompOp;
-        private final Joiner joiner;
+        private final StrJoiner joiner;
         private final boolean supportNotOperator;
 
         Expression(String hiveCompOp, String sqlCompOp) {
-            this(hiveCompOp, sqlCompOp, null);
+            this(hiveCompOp, sqlCompOp, null, false);
         }
 
-        Expression(String hiveCompOp, String sqlCompOp, Joiner joiner) {
-            this(hiveCompOp, sqlCompOp, joiner, false);
+        Expression(String hiveCompOp, String sqlCompOp, StrJoiner joiner) {
+            this(hiveCompOp, sqlCompOp, joiner, false);
         }
 
-        Expression(String hiveCompOp, String sqlCompOp, Joiner joiner, boolean supportNotOp) {
+        Expression(String hiveCompOp, String sqlCompOp, StrJoiner joiner, boolean supportNotOp) {
             this.hiveCompOp = hiveCompOp;
             this.sqlCompOp = sqlCompOp;
             this.joiner = joiner;
@@ -304,7 +311,7 @@ public class PhoenixQueryBuilder {
             if(rColumn != null) {
                 column = rColumn;
             }
-            return JOINER_SPACE.join(
+            return String.join(" ",
                     "\"" + column + "\"",
                     getSqlCompOpString(condition),
                     joiner != null ? createConstants(type, condition.getConstantDescs()) :
@@ -328,16 +335,11 @@ public class PhoenixQueryBuilder {
             if (constantDesc == null) {
                 return StringUtil.EMPTY_STRING;
             }
-
-            return joiner.join(Iterables.transform(Arrays.asList(constantDesc),
-                    new Function<ExprNodeConstantDesc, String>() {
-                        @Nullable
-                        @Override
-                        public String apply(@Nullable ExprNodeConstantDesc desc) {
-                            return createConstantString(typeName, String.valueOf(desc.getValue()));
-                        }
-                    }
-            ));
+            List<String> constants = new ArrayList<>();
+            for (ExprNodeConstantDesc s : constantDesc) {
+                constants.add(createConstantString(typeName, String.valueOf(s.getValue())));
+            }
+            return joiner.join(constants);
         }
 
         private static class ConstantStringWrapper {
@@ -346,7 +348,7 @@ public class PhoenixQueryBuilder {
             private String postfix;
 
             ConstantStringWrapper(String type, String prefix, String postfix) {
-                this(Lists.newArrayList(type), prefix, postfix);
+                this(new ArrayList<>(Arrays.asList(type)), prefix, postfix);
             }
 
             ConstantStringWrapper(List<String> types, String prefix, String postfix) {
@@ -356,25 +358,26 @@ public class PhoenixQueryBuilder {
             }
 
             public String apply(final String typeName, String value) {
-                return Iterables.any(types, new Predicate<String>() {
-
-                    @Override
-                    public boolean apply(@Nullable String type) {
-                        return typeName.startsWith(type);
+                boolean hasMatch = false;
+                for (String type : types) {
+                    if (typeName.startsWith(type)) {
+                        hasMatch = true;
+                        break;
                     }
-                }) ? prefix + value + postfix : value;
+                }
+                return hasMatch ? prefix + value + postfix : value;
             }
         }
 
         private static final String SINGLE_QUOTATION = "'";
-        private static List<ConstantStringWrapper> WRAPPERS = Lists.newArrayList(
-                new ConstantStringWrapper(Lists.newArrayList(
+        private static List<ConstantStringWrapper> WRAPPERS = new ArrayList<>(Arrays.asList(
+                new ConstantStringWrapper(new ArrayList<>(Arrays.asList(
                         serdeConstants.STRING_TYPE_NAME, serdeConstants.CHAR_TYPE_NAME,
                         serdeConstants.VARCHAR_TYPE_NAME, serdeConstants.DATE_TYPE_NAME,
-                        serdeConstants.TIMESTAMP_TYPE_NAME
+                        serdeConstants.TIMESTAMP_TYPE_NAME)
                 ), SINGLE_QUOTATION, SINGLE_QUOTATION),
                 new ConstantStringWrapper(serdeConstants.DATE_TYPE_NAME, "to_date(", ")"),
-                new ConstantStringWrapper(serdeConstants.TIMESTAMP_TYPE_NAME, "to_timestamp(", ")")
+                new ConstantStringWrapper(serdeConstants.TIMESTAMP_TYPE_NAME, "to_timestamp(", ")"))
         );
 
         private String createConstantString(String typeName, String value) {
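
The Joiner replacement above reduces to java.lang.String.join; a minimal sketch of how the WHERE-clause fragments are assembled with it (the column and constant values below are made up for illustration):

    import java.util.Arrays;
    import java.util.List;

    public class JoinSketch {
        public static void main(String[] args) {
            List<String> constants = Arrays.asList("'CHAR1'", "'CHAR2'", "'CHAR3'");

            // Joiner.on(", ").join(constants) becomes String.join(", ", constants);
            // the StrJoiner wrapper in the patch only carries the delimiter.
            String inList = "(" + String.join(", ", constants) + ")";

            // JOINER_SPACE.join(...) becomes a varargs String.join(" ", ...).
            String predicate = String.join(" ", "\"Column_Char\"", "in", inList);

            // Prints: "Column_Char" in ('CHAR1', 'CHAR2', 'CHAR3')
            System.out.println(predicate);
        }
    }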
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/ColumnMappingUtils.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/ColumnMappingUtils.java
index f348c0f..d4e3067 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/ColumnMappingUtils.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/ColumnMappingUtils.java
@@ -18,13 +18,12 @@
 
 package org.apache.phoenix.hive.util;
 
-import com.google.common.base.Splitter;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
 
 import java.util.*;
-
+import java.util.stream.Collectors;
 
 /**
  * Util class for mapping between Hive and Phoenix column names
@@ -46,14 +45,16 @@ public class ColumnMappingUtils {
             return Collections.emptyMap();
         }
 
-        Map<String, String> columnMappingMap = Splitter.on(PhoenixStorageHandlerConstants.COMMA)
-                .trimResults().withKeyValueSeparator(PhoenixStorageHandlerConstants.COLON).split
-                        (columnMappings);
+        Map<String, String> columnMappingMap = new HashMap<>();
+        for (String item : columnMappings.split(PhoenixStorageHandlerConstants.COMMA)) {
+            String[] kv = item.trim().split(PhoenixStorageHandlerConstants.COLON);
+            columnMappingMap.put(kv[0], kv.length > 1 ? kv[1] : "");
+        }
 
         if (LOG.isDebugEnabled()) {
             LOG.debug("Column mapping map : " + columnMappingMap);
         }
-
+        
         return columnMappingMap;
     }
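
A standalone sketch of the Splitter.withKeyValueSeparator replacement above, with the comma and colon constants inlined so it compiles on its own; the sample mapping string is invented for illustration:

    import java.util.HashMap;
    import java.util.Map;

    public class ColumnMappingSketch {
        public static void main(String[] args) {
            String columnMappings = " hive_col1:PHOENIX_COL1 , hive_col2:PHOENIX_COL2 ";

            // Splitter.on(',').trimResults().withKeyValueSeparator(':') becomes split() plus trim(),
            // guarding against entries that have no value after the colon.
            Map<String, String> columnMappingMap = new HashMap<>();
            for (String item : columnMappings.split(",")) {
                String[] kv = item.trim().split(":");
                columnMappingMap.put(kv[0], kv.length > 1 ? kv[1] : "");
            }

            // HashMap iteration order is not guaranteed; both mappings are present.
            System.out.println(columnMappingMap);
        }
    }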
 
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
index 19c26e5..176a2ec 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
@@ -17,8 +17,6 @@
  */
 package org.apache.phoenix.hive.util;
 
-import com.google.common.base.Joiner;
-import com.google.common.collect.Maps;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.lang.reflect.Array;
@@ -28,13 +26,10 @@ import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.Map.Entry;
-import java.util.Properties;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
 import javax.naming.NamingException;
 import org.apache.commons.logging.Log;
 import org.apache.hadoop.conf.Configuration;
@@ -160,7 +155,7 @@ public class PhoenixStorageHandlerUtil {
     }
 
     // Copy from org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.reverseDNS
-    private static final Map<InetAddress, String> reverseDNSCacheMap = Maps.newConcurrentMap();
+    private static final Map<InetAddress, String> reverseDNSCacheMap = new ConcurrentHashMap<>();
 
     private static String reverseDNS(InetAddress ipAddress) throws NamingException,
             UnknownHostException {
@@ -195,7 +190,7 @@ public class PhoenixStorageHandlerUtil {
     }
 
     public static Map<String, TypeInfo> createColumnTypeMap(JobConf jobConf) {
-        Map<String, TypeInfo> columnTypeMap = Maps.newHashMap();
+        Map<String, TypeInfo> columnTypeMap = new HashMap<>();
 
         String[] columnNames = jobConf.get(serdeConstants.LIST_COLUMNS).split
                 (PhoenixStorageHandlerConstants.COMMA);
@@ -247,7 +242,8 @@ public class PhoenixStorageHandlerUtil {
         if (obj instanceof Array) {
             Object[] values = (Object[]) obj;
 
-            content = Joiner.on(PhoenixStorageHandlerConstants.COMMA).join(values);
+            content = String.join(PhoenixStorageHandlerConstants.COMMA,
+                    (String[]) values);
         } else {
             content = obj.toString();
         }
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
index 9dcb3ef..a72f78e 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
@@ -17,10 +17,6 @@
  */
 package org.apache.phoenix.hive.util;
 
-import com.google.common.base.CharMatcher;
-import com.google.common.base.Splitter;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -41,7 +37,10 @@ import java.sql.DatabaseMetaData;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -55,8 +54,7 @@ public class PhoenixUtil {
 
     public static String getPhoenixType(String hiveTypeName) {
         if (hiveTypeName.startsWith("array")) {
-            List<String> tokenList = Lists.newArrayList(Splitter.on(CharMatcher.is('<').or
-                    (CharMatcher.is('>'))).split(hiveTypeName));
+            List<String> tokenList = new ArrayList<>(Arrays.asList(hiveTypeName.split("[<>]")));
             return getPhoenixType(tokenList.get(1)) + "[]";
         } else if (hiveTypeName.startsWith("int")) {
             return "integer";
@@ -89,7 +87,7 @@ public class PhoenixUtil {
 
     public static List<String> getPrimaryKeyColumnList(Connection conn, String tableName) throws
             SQLException {
-        Map<Short, String> primaryKeyColumnInfoMap = Maps.newHashMap();
+        Map<Short, String> primaryKeyColumnInfoMap = new HashMap<>();
         DatabaseMetaData dbMeta = conn.getMetaData();
 
         String[] schemaInfo = getTableSchema(tableName.toUpperCase());
@@ -103,7 +101,7 @@ public class PhoenixUtil {
             }
         }
 
-        return Lists.newArrayList(primaryKeyColumnInfoMap.values());
+        return new ArrayList<>(primaryKeyColumnInfoMap.values());
     }
 
     public static List<String> getPrimaryKeyColumnList(Configuration config, String tableName) {
diff --git a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java b/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
index cd0f93b..cfdf995 100644
--- a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
+++ b/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
@@ -17,7 +17,6 @@
  */
 package org.apache.phoenix.hive.query;
 
-import com.google.common.collect.Lists;
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
@@ -26,6 +25,8 @@ import org.apache.phoenix.hive.ql.index.IndexSearchCondition;
 import org.junit.Test;
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import static org.mockito.Mockito.mock;
@@ -80,42 +81,42 @@ public class PhoenixQueryBuilderTest {
                 "\" from TEST_TABLE where ";
 
         JobConf jobConf = new JobConf();
-        List<String> readColumnList = Lists.newArrayList(COLUMN_CHAR, COLUMN_VARCHAR);
-        List<IndexSearchCondition> searchConditions = Lists.newArrayList(
+        List<String> readColumnList = new ArrayList<>(Arrays.asList(COLUMN_CHAR, COLUMN_VARCHAR));
+        List<IndexSearchCondition> searchConditions = new ArrayList<>(Arrays.asList(
                 mockedIndexSearchCondition("GenericUDFOPEqual", "CHAR_VALUE", null, COLUMN_CHAR, "char(10)", false),
                 mockedIndexSearchCondition("GenericUDFOPEqual", "CHAR_VALUE2", null, COLUMN_VARCHAR, "varchar(10)", false)
-        );
+        ));
 
         assertEquals(expectedQueryPrefix + "\"Column_Char\" = 'CHAR_VALUE' and \"Column_VChar\" = 'CHAR_VALUE2'",
                 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, searchConditions));
 
-        searchConditions = Lists.newArrayList(
+        searchConditions = new ArrayList<>(Arrays.asList(
                 mockedIndexSearchCondition("GenericUDFIn", null,
-                        new Object[]{"CHAR1", "CHAR2", "CHAR3"}, COLUMN_CHAR, "char(10)", false)
+                        new Object[]{"CHAR1", "CHAR2", "CHAR3"}, COLUMN_CHAR, "char(10)", false))
         );
 
         assertEquals(expectedQueryPrefix + "\"Column_Char\" in ('CHAR1', 'CHAR2', 'CHAR3')",
                 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, searchConditions));
 
-        searchConditions = Lists.newArrayList(
+        searchConditions = new ArrayList<>(Arrays.asList(
                 mockedIndexSearchCondition("GenericUDFIn", null,
-                        new Object[]{"CHAR1", "CHAR2", "CHAR3"}, COLUMN_CHAR, "char(10)", true)
+                        new Object[]{"CHAR1", "CHAR2", "CHAR3"}, COLUMN_CHAR, "char(10)", true))
         );
 
         assertEquals(expectedQueryPrefix + "\"Column_Char\" not in ('CHAR1', 'CHAR2', 'CHAR3')",
                 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, searchConditions));
 
-        searchConditions = Lists.newArrayList(
+        searchConditions = new ArrayList<>(Arrays.asList(
                 mockedIndexSearchCondition("GenericUDFBetween", null,
-                        new Object[]{"CHAR1", "CHAR2"}, COLUMN_CHAR, "char(10)", false)
+                        new Object[]{"CHAR1", "CHAR2"}, COLUMN_CHAR, "char(10)", false))
         );
 
         assertEquals(expectedQueryPrefix + "\"Column_Char\" between 'CHAR1' and 'CHAR2'",
                 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, searchConditions));
 
-        searchConditions = Lists.newArrayList(
+        searchConditions = new ArrayList<>(Arrays.asList(
                 mockedIndexSearchCondition("GenericUDFBetween", null,
-                        new Object[]{"CHAR1", "CHAR2"}, COLUMN_CHAR, "char(10)", true)
+                        new Object[]{"CHAR1", "CHAR2"}, COLUMN_CHAR, "char(10)", true))
         );
 
         assertEquals(expectedQueryPrefix + "\"Column_Char\" not between 'CHAR1' and 'CHAR2'",
@@ -130,21 +131,21 @@ public class PhoenixQueryBuilderTest {
                 "\" from " + tableName + " where ";
 
         JobConf jobConf = new JobConf();
-        List<String> readColumnList = Lists.newArrayList(COLUMN_DATE);
+        List<String> readColumnList = new ArrayList<>(Arrays.asList(COLUMN_DATE));
 
-        List<IndexSearchCondition> searchConditions = Lists.newArrayList(
+        List<IndexSearchCondition> searchConditions = new ArrayList<>(Arrays.asList(
                 mockedIndexSearchCondition("GenericUDFBetween", null,
                         new Object[]{"1992-01-02", "1992-02-02"}, COLUMN_DATE, "date", false)
-        );
+        ));
 
         assertEquals(expectedQueryPrefix +
                         "\"" + COLUMN_DATE + "\" between to_date('1992-01-02') and to_date('1992-02-02')",
                 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, searchConditions));
 
-        searchConditions = Lists.newArrayList(
+        searchConditions = new ArrayList<>(Arrays.asList(
                 mockedIndexSearchCondition("GenericUDFBetween", null,
                         new Object[]{"1992-01-02", "1992-02-02"}, COLUMN_DATE, "date", true)
-        );
+        ));
 
         assertEquals(expectedQueryPrefix +
                         "\"" + COLUMN_DATE + "\" not between to_date('1992-01-02') and to_date('1992-02-02')",
@@ -159,11 +160,11 @@ public class PhoenixQueryBuilderTest {
                 "\" from " + tableName + " where ";
 
         JobConf jobConf = new JobConf();
-        List<String> readColumnList = Lists.newArrayList(COLUMN_DATE);
+        List<String> readColumnList = new ArrayList<>(Arrays.asList(COLUMN_DATE));
 
-        List<IndexSearchCondition> searchConditions = Lists.newArrayList(
+        List<IndexSearchCondition> searchConditions = new ArrayList<>(Arrays.asList(
                 mockedIndexSearchCondition("GenericUDFOPNotNull", null,
-                        null, COLUMN_DATE, "date", true)
+                        null, COLUMN_DATE, "date", true))
         );
 
         assertEquals(expectedQueryPrefix +
@@ -179,11 +180,11 @@ public class PhoenixQueryBuilderTest {
           "\" from " + tableName + " where ";
 
         JobConf jobConf = new JobConf();
-        List<String> readColumnList = Lists.newArrayList(COLUMN_BIGINT);
+        List<String> readColumnList = new ArrayList<>(Arrays.asList(COLUMN_BIGINT));
 
-        List<IndexSearchCondition> searchConditions = Lists.newArrayList(
+        List<IndexSearchCondition> searchConditions = new ArrayList<>(Arrays.asList(
           mockedIndexSearchCondition("GenericUDFOPEqual", 100L,
-            null, COLUMN_BIGINT, "bigint", false)
+            null, COLUMN_BIGINT, "bigint", false))
         );
 
         assertEquals(expectedQueryPrefix + "\"" + COLUMN_BIGINT + "\" = 100",
diff --git a/phoenix-kafka/src/it/java/org/apache/phoenix/kafka/PhoenixConsumerIT.java b/phoenix-kafka/src/it/java/org/apache/phoenix/kafka/PhoenixConsumerIT.java
index cfec391..6de0a47 100644
--- a/phoenix-kafka/src/it/java/org/apache/phoenix/kafka/PhoenixConsumerIT.java
+++ b/phoenix-kafka/src/it/java/org/apache/phoenix/kafka/PhoenixConsumerIT.java
@@ -43,7 +43,7 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.common.io.Resources;
+
 
 import kafka.admin.AdminUtils;
 import kafka.server.KafkaConfig;
@@ -206,7 +206,7 @@ public class PhoenixConsumerIT extends BaseHBaseManagedTimeIT {
 
         PhoenixConsumerThread(PhoenixConsumer pConsumer, String path) {
             this.pConsumer = pConsumer;
-            try (InputStream props = Resources.getResource(path).openStream()) {
+            try (InputStream props = getClass().getClassLoader().getResourceAsStream(path)) {
                 Properties properties = new Properties();
                 properties.load(props);
                 this.properties = properties;
@@ -243,7 +243,7 @@ public class PhoenixConsumerIT extends BaseHBaseManagedTimeIT {
 
         KafkaProducerThread(String path, String topic) {
             this.topic = topic;
-            try (InputStream props = Resources.getResource(path).openStream()) {
+            try (InputStream props = getClass().getClassLoader().getResourceAsStream(path)) {
                 Properties properties = new Properties();
                 properties.load(props);
                 producer = new KafkaProducer<>(properties);
diff --git a/phoenix-kafka/src/main/java/org/apache/phoenix/kafka/consumer/PhoenixConsumer.java b/phoenix-kafka/src/main/java/org/apache/phoenix/kafka/consumer/PhoenixConsumer.java
index 1759cec..86ab66c 100644
--- a/phoenix-kafka/src/main/java/org/apache/phoenix/kafka/consumer/PhoenixConsumer.java
+++ b/phoenix-kafka/src/main/java/org/apache/phoenix/kafka/consumer/PhoenixConsumer.java
@@ -20,6 +20,7 @@ package org.apache.phoenix.kafka.consumer;
 import java.io.IOException;
 import java.io.InputStream;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
@@ -45,9 +46,6 @@ import org.apache.phoenix.kafka.KafkaConstants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Preconditions;
-import com.google.common.base.Throwables;
-import com.google.common.collect.Lists;
 
 public class PhoenixConsumer {
     private static final Logger logger = LoggerFactory.getLogger(PhoenixConsumer.class);
@@ -80,14 +78,16 @@ public class PhoenixConsumer {
     
     /**
      * Initializes the kafka with properties file.
-     * @param path
+     * @param conf
      * @throws IOException 
      */
     public void intializeKafka(Configuration conf) throws IOException {
     	// get the kafka consumer file
     	String file = conf.get("kafka.consumer.file");
-        Preconditions.checkNotNull(file,"File path cannot be empty, please specify in the arguments");
-        
+        if (file == null) {
+            throw new NullPointerException("File path cannot be empty, please specify in the arguments");
+        }
+
         Path path = new Path(file);
         FileSystem fs = FileSystem.get(conf);
         try (InputStream props = fs.open(path)) {
@@ -107,7 +107,9 @@ public class PhoenixConsumer {
         this.properties = properties;
         
         String servers = properties.getProperty(KafkaConstants.BOOTSTRAP_SERVERS);
-        Preconditions.checkNotNull(servers,"Bootstrap Servers cannot be empty, please specify in the configuration file");
+        if (servers == null) {
+            throw new NullPointerException("Bootstrap Servers cannot be empty, please specify in the configuration file");
+        }
         properties.setProperty(KafkaConstants.BOOTSTRAP_SERVERS, servers);
         
         if (properties.getProperty(KafkaConstants.GROUP_ID) == null) {
@@ -119,8 +121,11 @@ public class PhoenixConsumer {
         }
 
         String topics = properties.getProperty(KafkaConstants.TOPICS);
-        Preconditions.checkNotNull(topics,"Topics cannot be empty, please specify in the configuration file");
-        
+
+        if (topics == null) {
+            throw new NullPointerException("Topics cannot be empty, please specify in the configuration file");
+        }
+
         properties.setProperty(KafkaConstants.KEY_DESERIALIZER, KafkaConstants.DEFAULT_KEY_DESERIALIZER);
         
         properties.setProperty(KafkaConstants.VALUE_DESERIALIZER, KafkaConstants.DEFAULT_VALUE_DESERIALIZER);
@@ -147,8 +152,9 @@ public class PhoenixConsumer {
         this.timeout = context.getLong(KafkaConstants.TIMEOUT, KafkaConstants.DEFAULT_TIMEOUT);
         this.batchSize = context.getInteger(FlumeConstants.CONFIG_BATCHSIZE, FlumeConstants.DEFAULT_BATCH_SIZE);
         final String eventSerializerType = context.getString(FlumeConstants.CONFIG_SERIALIZER);
-        
-        Preconditions.checkNotNull(eventSerializerType,"Event serializer cannot be empty, please specify in the configuration file");
+        if (eventSerializerType == null) {
+            throw new NullPointerException("Event serializer cannot be empty, please specify in the configuration file");
+        }
         initializeSerializer(context,eventSerializerType);
     }
     
@@ -171,7 +177,7 @@ public class PhoenixConsumer {
             }
 
             if (!records.isEmpty()) {
-                List<Event> events = Lists.newArrayListWithCapacity(records.count());
+                List<Event> events = new ArrayList<>(records.count());
                 for (ConsumerRecord<String, String> record : records) {
                     Event event = EventBuilder.withBody(Bytes.toBytes(record.value()));
                     events.add(event);
@@ -195,7 +201,12 @@ public class PhoenixConsumer {
             serializer.initialize();
         } catch (Exception ex) {
             logger.error("Error {} in initializing the serializer.", ex.getMessage());
-            Throwables.propagate(ex);
+            if (ex instanceof RuntimeException) {
+                throw (RuntimeException) ex;
+            }
+            else {
+                throw new RuntimeException(ex);
+            }
         }
     }
 
@@ -256,7 +267,12 @@ public class PhoenixConsumer {
             serializer.configure(serializerContext);
         } catch (Exception e) {
             logger.error("Could not instantiate event serializer.", e);
-            Throwables.propagate(e);
+            if (e instanceof RuntimeException) {
+                throw (RuntimeException) e;
+            }
+            else {
+                throw new RuntimeException(e);
+            }
         }
     }
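
For the precondition and exception-propagation changes above, a small self-contained sketch of the equivalent plain-JDK pattern; java.util.Objects.requireNonNull is shown as one possible alternative to the hand-rolled null checks, and the helper names below are illustrative only:

    import java.util.Objects;

    public class GuavaFreeChecks {

        // Preconditions.checkNotNull(value, msg) can also be written as Objects.requireNonNull(value, msg).
        static String requireProperty(String value, String message) {
            return Objects.requireNonNull(value, message);
        }

        // Throwables.propagate(ex): rethrow RuntimeExceptions as-is, wrap checked exceptions.
        static RuntimeException propagate(Exception ex) {
            if (ex instanceof RuntimeException) {
                return (RuntimeException) ex;
            }
            return new RuntimeException(ex);
        }

        public static void main(String[] args) {
            System.out.println(requireProperty("localhost:9092", "Bootstrap Servers cannot be empty"));
            try {
                throw propagate(new java.io.IOException("boom"));
            } catch (RuntimeException e) {
                System.out.println(e.getCause());
            }
        }
    }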
     
diff --git a/phoenix-pig/src/main/java/org/apache/phoenix/pig/PhoenixHBaseLoader.java b/phoenix-pig/src/main/java/org/apache/phoenix/pig/PhoenixHBaseLoader.java
index 7380b8a..ca212e5 100644
--- a/phoenix-pig/src/main/java/org/apache/phoenix/pig/PhoenixHBaseLoader.java
+++ b/phoenix-pig/src/main/java/org/apache/phoenix/pig/PhoenixHBaseLoader.java
@@ -52,7 +52,6 @@ import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.util.ObjectSerializer;
 import org.apache.pig.impl.util.UDFContext;
 
-import com.google.common.base.Preconditions;
 
 
 /**
@@ -98,8 +97,14 @@ public final class PhoenixHBaseLoader extends LoadFunc implements LoadMetadata {
      */
     public PhoenixHBaseLoader(String zkQuorum) {
         super();
-        Preconditions.checkNotNull(zkQuorum);
-        Preconditions.checkState(zkQuorum.length() > 0, "Zookeeper quorum cannot be empty!");
+        if (zkQuorum == null) {
+            throw new NullPointerException();
+        }
+
+        if (zkQuorum.isEmpty()) {
+            throw new IllegalStateException("Zookeeper quorum cannot be empty!");
+        }
+
         this.zkQuorum = zkQuorum;
     }
     
diff --git a/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/PhoenixPigSchemaUtilTest.java b/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/PhoenixPigSchemaUtilTest.java
index 9dc703b..2e6794a 100644
--- a/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/PhoenixPigSchemaUtilTest.java
+++ b/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/PhoenixPigSchemaUtilTest.java
@@ -25,6 +25,8 @@ import static org.mockito.Mockito.when;
 import java.io.IOException;
 import java.sql.SQLException;
 import java.sql.Types;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
@@ -38,7 +40,6 @@ import org.apache.pig.ResourceSchema.ResourceFieldSchema;
 import org.apache.pig.data.DataType;
 import org.junit.Test;
 
-import com.google.common.collect.Lists;
 
 /**
  * 
@@ -59,7 +60,7 @@ public class PhoenixPigSchemaUtilTest {
 				configuration, new Dependencies() {
 					List<ColumnInfo> getSelectColumnMetadataList(
 							Configuration configuration) throws SQLException {
-						return Lists.newArrayList(ID_COLUMN, NAME_COLUMN);
+						return new ArrayList<>(Arrays.asList(ID_COLUMN, NAME_COLUMN));
 					}
 				});        
         // expected schema.
@@ -84,7 +85,7 @@ public class PhoenixPigSchemaUtilTest {
 				configuration, new Dependencies() {
 					List<ColumnInfo> getSelectColumnMetadataList(
 							Configuration configuration) throws SQLException {
-						return Lists.newArrayList(ID_COLUMN, LOCATION_COLUMN);
+						return new ArrayList<>(Arrays.asList(ID_COLUMN, LOCATION_COLUMN));
 					}
 				});  
         fail("We currently don't support Array type yet. WIP!!");
diff --git a/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/QuerySchemaParserFunctionTest.java b/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/QuerySchemaParserFunctionTest.java
index 474d9e2..4b13691 100644
--- a/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/QuerySchemaParserFunctionTest.java
+++ b/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/QuerySchemaParserFunctionTest.java
@@ -30,7 +30,6 @@ import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mockito;
 
-import com.google.common.base.Joiner;
 
 /**
  * 
@@ -67,7 +66,7 @@ public class QuerySchemaParserFunctionTest extends BaseConnectionlessQueryTest {
         Pair<String,String> pair = function.apply(selectQuery);
          
         assertEquals(pair.getFirst(), "EMPLOYEE");
-        assertEquals(pair.getSecond(),Joiner.on(',').join("NAME","AGE","LOCATION"));
+        assertEquals(pair.getSecond(), String.join(",", "NAME","AGE","LOCATION"));
     }
     
     @Test(expected=RuntimeException.class)
diff --git a/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TableSchemaParserFunctionTest.java b/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TableSchemaParserFunctionTest.java
index 9e5a294..ea36b63 100644
--- a/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TableSchemaParserFunctionTest.java
+++ b/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TableSchemaParserFunctionTest.java
@@ -24,7 +24,8 @@ import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.pig.util.TableSchemaParserFunction;
 import org.junit.Test;
 
-import com.google.common.base.Joiner;
+import java.util.ArrayList;
+import java.util.Arrays;
 
 public class TableSchemaParserFunctionTest {
 
@@ -35,7 +36,8 @@ public class TableSchemaParserFunctionTest {
         final String loadTableSchema = "EMPLOYEE/col1,col2";
         final Pair<String,String> pair = function.apply(loadTableSchema);
         assertEquals("EMPLOYEE", pair.getFirst());
-        assertEquals(pair.getSecond(),Joiner.on(',').join("col1","col2"));
+        assertEquals(pair.getSecond(),
+                String.join(",", "col1", "col2"));
     }
     
     @Test(expected=IllegalArgumentException.class)
diff --git a/phoenix-spark/src/it/java/org/apache/phoenix/spark/OrderByIT.java b/phoenix-spark/src/it/java/org/apache/phoenix/spark/OrderByIT.java
index e44b011..d269a28 100644
--- a/phoenix-spark/src/it/java/org/apache/phoenix/spark/OrderByIT.java
+++ b/phoenix-spark/src/it/java/org/apache/phoenix/spark/OrderByIT.java
@@ -29,6 +29,8 @@ import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
 
@@ -42,7 +44,6 @@ import org.apache.spark.sql.SQLContext;
 import org.apache.spark.sql.sources.v2.DataSourceOptions;
 import org.junit.Test;
 
-import com.google.common.collect.Lists;
 
 import scala.Option;
 import scala.collection.JavaConverters;
@@ -123,7 +124,8 @@ public class OrderByIT extends BaseOrderByIT {
             conn.commit();
 
             // create two PhoenixRDDs  using the table names and columns that are required for the JOIN query
-            List<String> table1Columns = Lists.newArrayList("A_STRING", "CF1.A", "CF1.B", "COL1", "CF2.C", "CF2.D");
+            List<String> table1Columns = new ArrayList<>(
+                Arrays.asList("A_STRING", "CF1.A", "CF1.B", "COL1", "CF2.C", "CF2.D"));
             SQLContext sqlContext = SparkUtil.getSparkSession().sqlContext();
             Dataset phoenixDataSet = SparkUtil.getSparkSession().read().format("phoenix")
                     .option(DataSourceOptions.TABLE_KEY, tableName1)
diff --git a/phoenix-spark/src/it/java/org/apache/phoenix/spark/SparkUtil.java b/phoenix-spark/src/it/java/org/apache/phoenix/spark/SparkUtil.java
index 668c3c8..1c36f01 100644
--- a/phoenix-spark/src/it/java/org/apache/phoenix/spark/SparkUtil.java
+++ b/phoenix-spark/src/it/java/org/apache/phoenix/spark/SparkUtil.java
@@ -18,7 +18,6 @@
  */
 package org.apache.phoenix.spark;
 
-import com.google.common.base.Joiner;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.query.QueryServices;
@@ -60,7 +59,7 @@ public class SparkUtil {
         // here we assume that the required columns are in the primary key column order
         String prevOrderBy = queryBuilder.getOrderByClause();
         if (forceRowKeyOrder &&  (queryBuilder.getOrderByClause()==null || queryBuilder.getOrderByClause().isEmpty())) {
-            queryBuilder.setOrderByClause(Joiner.on(", ").join(queryBuilder.getRequiredColumns()));
+            queryBuilder.setOrderByClause(String.join(", ", queryBuilder.getRequiredColumns()));
         }
 
         // create PhoenixRDD using the table name and columns that are required by the query
diff --git a/phoenix-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixDataSourceReadOptions.java b/phoenix-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixDataSourceReadOptions.java
index 67343d4..5b3df46 100644
--- a/phoenix-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixDataSourceReadOptions.java
+++ b/phoenix-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixDataSourceReadOptions.java
@@ -17,8 +17,6 @@
  */
 package org.apache.phoenix.spark.datasource.v2.reader;
 
-import com.google.common.base.Preconditions;
-
 import java.io.Serializable;
 import java.util.Properties;
 
@@ -32,7 +30,9 @@ class PhoenixDataSourceReadOptions implements Serializable {
 
     PhoenixDataSourceReadOptions(String zkUrl, String scn, String tenantId,
             String selectStatement, Properties overriddenProps) {
-        Preconditions.checkNotNull(overriddenProps);
+        if (overriddenProps == null) {
+            throw new NullPointerException();
+        }
         this.zkUrl = zkUrl;
         this.scn = scn;
         this.tenantId = tenantId;
diff --git a/phoenix-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixDataSourceReader.java b/phoenix-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixDataSourceReader.java
index 18e304b..36b7efb 100644
--- a/phoenix-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixDataSourceReader.java
+++ b/phoenix-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixDataSourceReader.java
@@ -17,8 +17,6 @@
  */
 package org.apache.phoenix.spark.datasource.v2.reader;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.RegionLocator;
@@ -53,6 +51,8 @@ import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Optional;
@@ -139,10 +139,13 @@ public class PhoenixDataSourceReader implements DataSourceReader, SupportsPushDo
         }
         try (Connection conn = DriverManager.getConnection(
                 JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + zkUrl, overriddenProps)) {
-            List<ColumnInfo> columnInfos = PhoenixRuntime.generateColumnInfo(conn, tableName, Lists.newArrayList(schema.names()));
+            List<ColumnInfo> columnInfos = PhoenixRuntime.generateColumnInfo(conn, tableName, new ArrayList<>(
+                Arrays.asList(schema.names())));
             final Statement statement = conn.createStatement();
             final String selectStatement = QueryUtil.constructSelectStatement(tableName, columnInfos, whereClause);
-            Preconditions.checkNotNull(selectStatement);
+            if (selectStatement == null) {
+                throw new NullPointerException();
+            }
 
             final PhoenixStatement pstmt = statement.unwrap(PhoenixStatement.class);
             // Optimize the query plan so that we potentially use secondary indexes
@@ -168,7 +171,7 @@ public class PhoenixDataSourceReader implements DataSourceReader, SupportsPushDo
             RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(regionLocator, connection
                     .getAdmin());
 
-            final List<InputPartition<InternalRow>> partitions = Lists.newArrayListWithExpectedSize(allSplits.size());
+            final List<InputPartition<InternalRow>> partitions = new ArrayList<>(allSplits.size());
             for (List<Scan> scans : queryPlan.getScans()) {
                 // Get the region location
                 HRegionLocation location = regionLocator.getRegionLocation(
diff --git a/phoenix-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixInputPartitionReader.java b/phoenix-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixInputPartitionReader.java
index 49f6c13..3b6e8e3 100644
--- a/phoenix-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixInputPartitionReader.java
+++ b/phoenix-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixInputPartitionReader.java
@@ -22,6 +22,7 @@ import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
 
@@ -52,9 +53,6 @@ import org.apache.spark.sql.execution.datasources.SparkJdbcUtil;
 import org.apache.spark.sql.sources.v2.reader.InputPartitionReader;
 import org.apache.spark.sql.types.StructType;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-
 import scala.collection.Iterator;
 
 import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL;
@@ -96,7 +94,9 @@ public class PhoenixInputPartitionReader implements InputPartitionReader<Interna
                 JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + zkUrl, overridingProps)) {
             final Statement statement = conn.createStatement();
             final String selectStatement = options.getSelectStatement();
-            Preconditions.checkNotNull(selectStatement);
+            if (selectStatement == null) {
+                throw new NullPointerException();
+            }
 
             final PhoenixStatement pstmt = statement.unwrap(PhoenixStatement.class);
             // Optimize the query plan so that we potentially use secondary indexes
@@ -108,7 +108,7 @@ public class PhoenixInputPartitionReader implements InputPartitionReader<Interna
         try {
             final QueryPlan queryPlan = getQueryPlan();
             final List<Scan> scans = phoenixInputSplit.value().getScans();
-            List<PeekingResultIterator> iterators = Lists.newArrayListWithExpectedSize(scans.size());
+            List<PeekingResultIterator> iterators = new ArrayList<>(scans.size());
             StatementContext ctx = queryPlan.getContext();
             ReadMetricQueue readMetrics = ctx.getReadMetricsQueue();
             String tableName = queryPlan.getTableRef().getTable().getPhysicalName().getString();
diff --git a/phoenix-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/writer/PhoenixDataWriter.java b/phoenix-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/writer/PhoenixDataWriter.java
index f67695c..a5199cd 100644
--- a/phoenix-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/writer/PhoenixDataWriter.java
+++ b/phoenix-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/writer/PhoenixDataWriter.java
@@ -23,6 +23,7 @@ import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
 import java.util.stream.Collectors;
@@ -46,8 +47,6 @@ import org.apache.spark.sql.catalyst.analysis.SimpleAnalyzer$;
 import org.apache.spark.sql.catalyst.expressions.AttributeReference;
 import org.apache.spark.sql.catalyst.expressions.Attribute;
 
-import com.google.common.collect.Lists;
-
 import static org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil.DEFAULT_UPSERT_BATCH_SIZE;
 import static org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil.UPSERT_BATCH_SIZE;
 import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL;
@@ -85,7 +84,7 @@ public class PhoenixDataWriter implements DataWriter<InternalRow> {
         try {
             this.conn = DriverManager.getConnection(JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + zkUrl,
                     overridingProps);
-            List<String> colNames = Lists.newArrayList(options.getSchema().names());
+            List<String> colNames = new ArrayList<>(Arrays.asList(options.getSchema().names()));
             if (!options.skipNormalizingIdentifier()){
                 colNames = colNames.stream().map(SchemaUtil::normalizeIdentifier).collect(Collectors.toList());
             }
diff --git a/pom.xml b/pom.xml
index 2c473f8..7436818 100644
--- a/pom.xml
+++ b/pom.xml
@@ -69,7 +69,7 @@
     <disruptor.version>3.3.6</disruptor.version>
     <slf4j.version>1.6.4</slf4j.version>
     <commons-csv.version>1.0</commons-csv.version>
-    <guava.version>13.0.1</guava.version>
+
     <flume.version>1.4.0</flume.version>
     <kafka.version>0.9.0.0</kafka.version>
     <spark.version>2.4.0</spark.version>