Posted to commits@hive.apache.org by jc...@apache.org on 2019/11/12 08:16:40 UTC

[hive] branch master updated: HIVE-22392: Hive JDBC Storage Handler: Support For Writing Data to JDBC Data Source (Syed Shameerur Rahman, reviewed by Jesus Camacho Rodriguez)

This is an automated email from the ASF dual-hosted git repository.

jcamacho pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new f361fc6  HIVE-22392: Hive JDBC Storage Handler: Support For Writing Data to JDBC Data Source (Syed Shameerur Rahman, reviewed by Jesus Camacho Rodriguez)
f361fc6 is described below

commit f361fc666a601707fabc3786d80a8bfa2cdd9e02
Author: Syed Shameerur Rahman <sr...@qubole.com>
AuthorDate: Tue Nov 12 00:14:47 2019 -0800

    HIVE-22392: Hive JDBC Storage Handler: Support For Writing Data to JDBC Data Source (Syed Shameerur Rahman, reviewed by Jesus Camacho Rodriguez)
    
    Closes apache/hive#829
---
 .../apache/hive/storage/jdbc/DBRecordWritable.java |  77 ++++++++
 .../apache/hive/storage/jdbc/JdbcOutputFormat.java |  10 +-
 .../apache/hive/storage/jdbc/JdbcRecordWriter.java |  70 +++++++
 .../org/apache/hive/storage/jdbc/JdbcSerDe.java    |  73 +++++++-
 .../hive/storage/jdbc/dao/DB2DatabaseAccessor.java |  19 ++
 .../hive/storage/jdbc/dao/DatabaseAccessor.java    |   6 +
 .../storage/jdbc/dao/DatabaseAccessorFactory.java  |   4 +
 ...aseAccessor.java => DerbyDatabaseAccessor.java} |  32 ++--
 .../jdbc/dao/GenericJdbcDatabaseAccessor.java      |  53 ++++++
 .../storage/jdbc/dao/OracleDatabaseAccessor.java   |  19 ++
 ql/src/test/queries/clientpositive/jdbc_handler.q  |  73 ++++++++
 .../results/clientpositive/llap/jdbc_handler.q.out | 207 +++++++++++++++++++++
 12 files changed, 619 insertions(+), 24 deletions(-)
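
In outline, the write path added by this commit is: JdbcSerDe.serialize() turns a Hive row into a DBRecordWritable, JdbcOutputFormat obtains a Hadoop DBRecordWriter from the DatabaseAccessor (bound to a prepared INSERT statement), and JdbcRecordWriter adapts that writer to Hive's FileSinkOperator.RecordWriter contract. A minimal sketch of the flow; the glue code is hypothetical and simplified, only the class and method names come from this commit:

    // Hypothetical glue code, simplified from the classes in this commit.
    DBRecordWritable record = jdbcSerDe.serialize(hiveRow, rowInspector);  // Hive row -> column values
    FileSinkOperator.RecordWriter writer = jdbcOutputFormat.getHiveRecordWriter(
        jobConf, null, null, false, tableProperties, progressable);       // wraps a DBRecordWriter
    writer.write(record);  // binds values to "INSERT INTO <table> VALUES (?,...,?)" and batches the row
    writer.close(false);   // flushes the batch and commits; close(true) rolls back instead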

diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/DBRecordWritable.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/DBRecordWritable.java
new file mode 100644
index 0000000..b062aa3
--- /dev/null
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/DBRecordWritable.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.storage.jdbc;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Arrays;
+import org.apache.hadoop.io.Writable;
+
+/**
+ * DBRecordWritable holds one serialized row at a time and writes it to the underlying database.
+ */
+public class DBRecordWritable implements Writable,
+        org.apache.hadoop.mapreduce.lib.db.DBWritable {
+
+  private Object[] columnValues;
+
+  public DBRecordWritable() {
+  }
+
+  public DBRecordWritable(int numColumns) {
+    this.columnValues = new Object[numColumns];
+  }
+
+  public void clear() {
+    Arrays.fill(columnValues, null);
+  }
+
+  public void set(int i, Object columnObject) {
+    columnValues[i] = columnObject;
+  }
+
+  @Override
+  public void readFields(ResultSet rs) throws SQLException {
+    // do nothing
+  }
+
+  @Override
+  public void write(PreparedStatement statement) throws SQLException {
+    if (columnValues == null) {
+      throw new SQLException("No data available to be written");
+    }
+    for (int i = 0; i < columnValues.length; i++) {
+      statement.setObject(i + 1, columnValues[i]);
+    }
+  }
+
+  @Override
+  public void readFields(DataInput in) throws IOException {
+    // do nothing
+  }
+
+  @Override
+  public void write(DataOutput out) throws IOException {
+    // do nothing
+  }
+
+}
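
The writable is intentionally one-directional: both readFields() methods are no-ops, and only write(PreparedStatement) does real work. A minimal usage sketch, assuming an open connection conn and an existing table T(a INT, b VARCHAR(10)); all names here are illustrative:

    // Illustrative only; not part of the commit.
    PreparedStatement ps = conn.prepareStatement("INSERT INTO T VALUES (?, ?)");
    DBRecordWritable record = new DBRecordWritable(2);
    record.set(0, 42);       // column indexes are 0-based on the writable...
    record.set(1, "hello");
    record.write(ps);        // ...and bound 1-based on the PreparedStatement
    ps.executeUpdate();
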
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcOutputFormat.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcOutputFormat.java
index 26fb3cd..e7b3f76 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcOutputFormat.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcOutputFormat.java
@@ -18,12 +18,16 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.util.Progressable;
+import org.apache.hive.storage.jdbc.dao.DatabaseAccessor;
+import org.apache.hive.storage.jdbc.dao.DatabaseAccessorFactory;
 
 import java.io.IOException;
 import java.util.Properties;
@@ -41,7 +45,11 @@ public class JdbcOutputFormat implements OutputFormat<NullWritable, MapWritable>
       boolean isCompressed,
       Properties tableProperties,
       Progressable progress) throws IOException {
-    throw new UnsupportedOperationException("Write operations are not allowed.");
+    DatabaseAccessor dbAccessor = DatabaseAccessorFactory.getAccessor(jc);
+    TaskAttemptContext taskAttemptContext = ShimLoader.getHadoopShims().newTaskAttemptContext(jc, null);
+    org.apache.hadoop.mapreduce.RecordWriter recordWriter = dbAccessor.getRecordWriter(taskAttemptContext);
+    // Wrapping DBRecordWriter in JdbcRecordWriter
+    return new JdbcRecordWriter((org.apache.hadoop.mapreduce.lib.db.DBOutputFormat.DBRecordWriter) recordWriter);
   }
 
 
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcRecordWriter.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcRecordWriter.java
new file mode 100644
index 0000000..c27f1d5
--- /dev/null
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcRecordWriter.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.storage.jdbc;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.mapreduce.lib.db.DBOutputFormat.DBRecordWriter;
+import org.apache.hadoop.util.StringUtils;
+
+/**
+ * JdbcRecordWriter is a wrapper class that writes data to the underlying database.
+ */
+public class JdbcRecordWriter implements RecordWriter {
+  private static final Log LOG = LogFactory.getLog(JdbcRecordWriter.class);
+
+  @SuppressWarnings("rawtypes")
+  private final DBRecordWriter dbRecordWriter;
+
+  @SuppressWarnings("rawtypes")
+  public JdbcRecordWriter(DBRecordWriter writer) {
+    this.dbRecordWriter = writer;
+  }
+
+  @SuppressWarnings("unchecked")
+  @Override
+  public void write(Writable w) throws IOException {
+    dbRecordWriter.write((DBRecordWritable) w, null);
+  }
+
+  @Override
+  public void close(boolean abort) throws IOException {
+    if (abort) {
+      Connection conn = dbRecordWriter.getConnection();
+      try {
+        conn.rollback();
+      } catch (SQLException ex) {
+        LOG.warn(StringUtils.stringifyException(ex));
+      } finally {
+        try {
+          conn.close();
+        } catch (SQLException ex) {
+          throw new IOException(ex);
+        }
+      }
+    } else {
+      dbRecordWriter.close(null);
+    }
+  }
+
+}
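
Hadoop's DBOutputFormat.DBRecordWriter accumulates rows with addBatch() and runs executeBatch() plus commit() when closed, so this wrapper only has to map Hive's abort flag onto a rollback. A hedged sketch of the contract; dbRecordWriter and record are assumed to already exist:

    // Illustrative only; not part of the commit.
    JdbcRecordWriter writer = new JdbcRecordWriter(dbRecordWriter);
    writer.write(record);   // delegates to DBRecordWriter.write(record, null), which calls addBatch()
    writer.close(false);    // executeBatch() + commit() on the underlying connection
    // writer.close(true);  // would instead rollback() and close the connection
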
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java
index 0a1bf2d..04927a8 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -61,9 +62,11 @@ public class JdbcSerDe extends AbstractSerDe {
   private static final Logger LOGGER = LoggerFactory.getLogger(JdbcSerDe.class);
 
   private String[] hiveColumnNames;
+  private int numColumns;
   private PrimitiveTypeInfo[] hiveColumnTypes;
   private ObjectInspector inspector;
   private List<Object> row;
+  private DBRecordWritable dbRecordWritable;
 
 
   /*
@@ -113,6 +116,9 @@ public class JdbcSerDe extends AbstractSerDe {
           throw new SerDeException("Received an empty Hive column type definition");
         }
 
+        numColumns = hiveColumnNames.length;
+        dbRecordWritable = new DBRecordWritable(numColumns);
+
         // Populate column types and inspector
         hiveColumnTypes = new PrimitiveTypeInfo[hiveColumnTypesList.size()];
         List<ObjectInspector> fieldInspectors = new ArrayList<>(hiveColumnNames.length);
@@ -143,6 +149,67 @@ public class JdbcSerDe extends AbstractSerDe {
   }
 
   @Override
+  public DBRecordWritable serialize(Object row, ObjectInspector inspector) throws SerDeException {
+    LOGGER.trace("Serializing from SerDe");
+    if ((row == null) || (hiveColumnTypes == null)) {
+      throw new SerDeException("JDBC SerDe hasn't been initialized properly");
+    }
+
+    if (((Object[]) row).length != numColumns) {
+      throw new SerDeException(String.format("Required %d columns, received %d.", numColumns, ((Object[]) row).length));
+    }
+
+    dbRecordWritable.clear();
+    for (int i = 0; i < numColumns; i++) {
+      Object rowData = ((Object[]) row)[i];
+      switch (hiveColumnTypes[i].getPrimitiveCategory()) {
+      case INT:
+        rowData = Integer.valueOf(rowData.toString());
+        break;
+      case SHORT:
+        rowData = Short.valueOf(rowData.toString());
+        break;
+      case BYTE:
+        rowData = Byte.valueOf(rowData.toString());
+        break;
+      case LONG:
+        rowData = Long.valueOf(rowData.toString());
+        break;
+      case FLOAT:
+        rowData = Float.valueOf(rowData.toString());
+        break;
+      case DOUBLE:
+        rowData = Double.valueOf(rowData.toString());
+        break;
+      case DECIMAL:
+        int scale = ((HiveDecimalWritable) rowData).getScale();
+        long value = ((HiveDecimalWritable) rowData).getHiveDecimal().unscaledValue().longValue();
+        rowData = java.math.BigDecimal.valueOf(value, scale);
+        break;
+      case BOOLEAN:
+        rowData = Boolean.valueOf(rowData.toString());
+        break;
+      case CHAR:
+      case VARCHAR:
+      case STRING:
+        rowData = rowData.toString();
+        break;
+      case DATE:
+        rowData = java.sql.Date.valueOf(rowData.toString());
+        break;
+      case TIMESTAMP:
+        rowData = java.sql.Timestamp.valueOf(rowData.toString());
+        break;
+      default:
+        // do nothing
+        break;
+      }
+      dbRecordWritable.set(i, rowData);
+    }
+    return dbRecordWritable;
+  }
+
+  @Override
   public Object deserialize(Writable blob) throws SerDeException {
     LOGGER.trace("Deserializing from SerDe");
     if (!(blob instanceof MapWritable)) {
@@ -271,12 +338,6 @@ public class JdbcSerDe extends AbstractSerDe {
 
 
   @Override
-  public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
-    throw new UnsupportedOperationException("Writes are not allowed");
-  }
-
-
-  @Override
   public SerDeStats getSerDeStats() {
     return null;
   }
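
The DECIMAL branch above rebuilds a java.math.BigDecimal from the writable's scale and unscaled value; note that unscaledValue().longValue() silently assumes the unscaled value fits in 64 bits. A self-contained check of the round trip, using a value from the test below:

    import java.math.BigDecimal;

    public class DecimalRoundTrip {
      public static void main(String[] args) {
        long unscaled = 17899;  // HiveDecimalWritable.getHiveDecimal().unscaledValue().longValue()
        int scale = 4;          // HiveDecimalWritable.getScale()
        BigDecimal value = BigDecimal.valueOf(unscaled, scale);
        System.out.println(value);  // prints 1.7899, matching DECIMAL(5,4) in the test table
      }
    }
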
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DB2DatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DB2DatabaseAccessor.java
index fab9829..e9f1631 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DB2DatabaseAccessor.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DB2DatabaseAccessor.java
@@ -41,4 +41,23 @@ public class DB2DatabaseAccessor extends GenericJdbcDatabaseAccessor {
     }
     return sql + " LIMIT " + limit;
   }
+
+  @Override
+  protected String constructQuery(String table, String[] columnNames) {
+    if (columnNames == null) {
+      throw new IllegalArgumentException("Column names may not be null");
+    }
+
+    StringBuilder query = new StringBuilder();
+    query.append("INSERT INTO ").append(table).append(" VALUES (");
+
+    for (int i = 0; i < columnNames.length; i++) {
+      query.append("?");
+      if (i != columnNames.length - 1) {
+        query.append(",");
+      }
+    }
+    query.append(")");
+    return query.toString();
+  }
 }
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessor.java
index a6d0306..654205d 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessor.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessor.java
@@ -18,8 +18,11 @@ import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.conf.Configuration;
 
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException;
 
+import java.io.IOException;
 import java.util.List;
 
 public interface DatabaseAccessor {
@@ -33,6 +36,9 @@ public interface DatabaseAccessor {
           offset) throws
           HiveJdbcDatabaseAccessException;
 
+  RecordWriter getRecordWriter(TaskAttemptContext context)
+      throws IOException;
+
   Pair<String, String> getBounds(Configuration conf, String partitionColumn, boolean lower, boolean upper) throws
           HiveJdbcDatabaseAccessException;
 
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java
index e531ecc..f39a641 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java
@@ -55,6 +55,10 @@ public class DatabaseAccessorFactory {
       accessor = new DB2DatabaseAccessor();
       break;
 
+    case DERBY:
+      accessor = new DerbyDatabaseAccessor();
+      break;
+
     default:
       accessor = new GenericJdbcDatabaseAccessor();
       break;
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DB2DatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DerbyDatabaseAccessor.java
similarity index 57%
copy from jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DB2DatabaseAccessor.java
copy to jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DerbyDatabaseAccessor.java
index fab9829..dcc49d6 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DB2DatabaseAccessor.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DerbyDatabaseAccessor.java
@@ -18,27 +18,25 @@
 package org.apache.hive.storage.jdbc.dao;
 
 /**
- * DB2 specific data accessor. DB2 JDBC drivers works similar to Postgres, so the current
- * implementation of DB2DatabaseAccessor is the same as PostgresDatabaseAccessor
+ * Derby specific data accessor.
  */
-public class DB2DatabaseAccessor extends GenericJdbcDatabaseAccessor {
+public class DerbyDatabaseAccessor extends GenericJdbcDatabaseAccessor {
   @Override
-  protected String addLimitAndOffsetToQuery(String sql, int limit, int offset) {
-    if (offset == 0) {
-      return addLimitToQuery(sql, limit);
-    } else {
-      if (limit == -1) {
-        return sql;
-      }
-      return sql + " LIMIT " + limit + " OFFSET " + offset;
+  protected String constructQuery(String table, String[] columnNames) {
+    if (columnNames == null) {
+      throw new IllegalArgumentException("Column names may not be null");
     }
-  }
 
-  @Override
-  protected String addLimitToQuery(String sql, int limit) {
-    if (limit == -1) {
-      return sql;
+    StringBuilder query = new StringBuilder();
+    query.append("INSERT INTO ").append(table).append(" VALUES (");
+
+    for (int i = 0; i < columnNames.length; i++) {
+      query.append("?");
+      if (i != columnNames.length - 1) {
+        query.append(",");
+      }
     }
-    return sql + " LIMIT " + limit;
+    query.append(")");
+    return query.toString();
   }
 }
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
index c2e7473..8dd9b93 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
@@ -21,6 +21,9 @@ import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -31,6 +34,7 @@ import org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException;
 
 import javax.sql.DataSource;
 
+import java.io.IOException;
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
@@ -172,6 +176,55 @@ public class GenericJdbcDatabaseAccessor implements DatabaseAccessor {
     }
   }
 
+  public RecordWriter getRecordWriter(TaskAttemptContext context)
+          throws IOException {
+    Configuration conf = context.getConfiguration();
+    String tableName =  conf.get(JdbcStorageConfig.TABLE.getPropertyName());
+
+    if (tableName == null || tableName.isEmpty()) {
+      throw new IllegalArgumentException("Table name should be defined");
+    }
+    Connection conn = null;
+    PreparedStatement ps = null;
+    String[] columnNames = conf.get(serdeConstants.LIST_COLUMNS).split(",");
+
+    try {
+      initializeDatabaseConnection(conf);
+      conn = dbcpDataSource.getConnection();
+      ps = conn.prepareStatement(constructQuery(tableName, columnNames));
+      return new org.apache.hadoop.mapreduce.lib.db.DBOutputFormat()
+              .new DBRecordWriter(conn, ps);
+    } catch (Exception e) {
+      cleanupResources(conn, ps, null);
+      throw new IOException(e);
+    }
+  }
+
+  /**
+   * Constructs the query used as the prepared statement to insert data.
+   *
+   * @param table
+   *          the table to insert into
+   * @param columnNames
+   *          the columns to insert into
+   */
+  protected String constructQuery(String table, String[] columnNames) {
+    if (columnNames == null) {
+      throw new IllegalArgumentException("Column names may not be null");
+    }
+
+    StringBuilder query = new StringBuilder();
+    query.append("INSERT INTO ").append(table).append(" VALUES (");
+
+    for (int i = 0; i < columnNames.length; i++) {
+      query.append("?");
+      if (i != columnNames.length - 1) {
+        query.append(",");
+      }
+    }
+    query.append(");");
+    return query.toString();
+  }
 
   /**
    * Uses generic JDBC escape functions to add a limit and offset clause to a query string
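
For a table named emp with three columns, the generic implementation above yields "INSERT INTO emp VALUES (?,?,?);" with a trailing semicolon, while the Derby, DB2, and Oracle overrides in this commit build the identical statement without it, since those drivers typically reject a statement terminator inside a prepared statement. Expected values, illustrative only:

    // Illustrative only; emp/id/name/salary are made-up names.
    String generic = constructQuery("emp", new String[] {"id", "name", "salary"});
    // generic                    -> "INSERT INTO emp VALUES (?,?,?);"
    // Derby/DB2/Oracle overrides -> "INSERT INTO emp VALUES (?,?,?)"   (no trailing ';')
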
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/OracleDatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/OracleDatabaseAccessor.java
index 4a993fb..636f6bd 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/OracleDatabaseAccessor.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/OracleDatabaseAccessor.java
@@ -49,4 +49,23 @@ public class OracleDatabaseAccessor extends GenericJdbcDatabaseAccessor {
     return "SELECT * FROM (" + sql + ") WHERE ROWNUM <= " + limit;
   }
 
+  @Override
+  protected String constructQuery(String table, String[] columnNames) {
+    if (columnNames == null) {
+      throw new IllegalArgumentException("Column names may not be null");
+    }
+
+    StringBuilder query = new StringBuilder();
+    query.append("INSERT INTO ").append(table).append(" VALUES (");
+
+    for (int i = 0; i < columnNames.length; i++) {
+      query.append("?");
+      if (i != columnNames.length - 1) {
+        query.append(",");
+      }
+    }
+    query.append(")");
+    return query.toString();
+  }
+
 }
diff --git a/ql/src/test/queries/clientpositive/jdbc_handler.q b/ql/src/test/queries/clientpositive/jdbc_handler.q
index d086735..2c7e3fd 100644
--- a/ql/src/test/queries/clientpositive/jdbc_handler.q
+++ b/ql/src/test/queries/clientpositive/jdbc_handler.q
@@ -91,3 +91,76 @@ from
 order by a,b;
 
 describe tables;
+
+-- Tests for inserting into a JDBC data source
+
+FROM src
+
+SELECT dboutput ( 'jdbc:derby:;databaseName=${system:test.tmp.dir}/test_insert_derby_as_external_table_db;create=true','','',
+'CREATE TABLE INSERT_TO_DERBY_TABLE (a BOOLEAN, b  INTEGER, c BIGINT, d FLOAT, e DOUBLE, f DATE, g VARCHAR(27),
+                                  h VARCHAR(27), i CHAR(2), j TIMESTAMP, k DECIMAL(5,4), l SMALLINT, m SMALLINT)' )
+
+limit 1;
+
+CREATE EXTERNAL TABLE insert_to_ext_derby_table
+(
+ a BOOLEAN,
+ b INTEGER,
+ c BIGINT,
+ d FLOAT,
+ e DOUBLE,
+ f DATE,
+ g VARCHAR(27),
+ h STRING,
+ i CHAR(2),
+ j TIMESTAMP,
+ k DECIMAL(5,4),
+ l TINYINT,
+ m SMALLINT
+ )
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+                "hive.sql.database.type" = "DERBY",
+                "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+                "hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${system:test.tmp.dir}/test_insert_derby_as_external_table_db;create=true;collation=TERRITORY_BASED:PRIMARY",
+                "hive.sql.dbcp.username" = "APP",
+                "hive.sql.dbcp.password" = "mine",
+                "hive.sql.table" = "INSERT_TO_DERBY_TABLE",
+                "hive.sql.dbcp.maxActive" = "1"
+);
+
+CREATE TABLE test_insert_tbl
+(
+ a BOOLEAN,
+ b INTEGER,
+ c BIGINT,
+ d FLOAT,
+ e DOUBLE,
+ f DATE,
+ g VARCHAR(27),
+ h STRING,
+ i CHAR(2),
+ j TIMESTAMP,
+ k DECIMAL(5,4),
+ l TINYINT,
+ m SMALLINT
+ );
+
+INSERT INTO test_insert_tbl VALUES(true, 342, 8900, 9.63, 1099.9999, '2019-04-11', 'abcd', 'efgh', 'k', '2019-05-01 00:00:00', 1.8899, 1, 2);
+
+-- Inserting a single row of data
+
+INSERT INTO insert_to_ext_derby_table VALUES(true, 10, 100, 2.63, 999.9999, '2019-01-11', 'test', 'test1', 'z', '2019-01-01 00:00:00', 1.7899, 1, 2);
+
+INSERT INTO insert_to_ext_derby_table select * from test_insert_tbl;
+select * from insert_to_ext_derby_table;
+
+INSERT INTO insert_to_ext_derby_table VALUES(false, 324, 53465, 2.6453, 599.9999, '2019-04-11', 'fgeg', 'asda', 'k', '2019-03-01 10:00:00', 1.7899, 1, 2);
+
+-- Inserting multiple rows of data
+INSERT INTO insert_to_ext_derby_table VALUES(false, 10, 100, 2.63, 999.9999, '2019-11-11', 'test', 'test1', 'a', '2019-01-01 00:00:00', 1.7899, 1, 2),
+                                         (true, 100, 1000, 2.632, 9999.99999, '2019-12-11', 'test_1', 'test1_1', 'b', '2019-02-01 01:00:01', 5.7899, 3, 4),
+                                         (false, 10, 999, 23.632, 99999.99999, '2019-09-11', 'test_2', 'test1_2', 'c', '2019-03-01 11:00:01', 9.7899, 5, 6);
+
+INSERT INTO insert_to_ext_derby_table select * from test_insert_tbl;
+select * from insert_to_ext_derby_table;
diff --git a/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out b/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
index 3c043f5..0d11d16 100644
--- a/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
+++ b/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
@@ -360,3 +360,210 @@ db_id               	bigint              	from deserializer
 name                	string              	from deserializer   
 type                	string              	from deserializer   
 #### A masked pattern was here ####
+PREHOOK: query: FROM src
+
+#### A masked pattern was here ####
+'CREATE TABLE INSERT_TO_DERBY_TABLE (a BOOLEAN, b  INTEGER, c BIGINT, d FLOAT, e DOUBLE, f DATE, g VARCHAR(27),
+                                  h VARCHAR(27), i CHAR(2), j TIMESTAMP, k DECIMAL(5,4), l SMALLINT, m SMALLINT)' )
+
+limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: FROM src
+
+#### A masked pattern was here ####
+'CREATE TABLE INSERT_TO_DERBY_TABLE (a BOOLEAN, b  INTEGER, c BIGINT, d FLOAT, e DOUBLE, f DATE, g VARCHAR(27),
+                                  h VARCHAR(27), i CHAR(2), j TIMESTAMP, k DECIMAL(5,4), l SMALLINT, m SMALLINT)' )
+
+limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0
+PREHOOK: query: CREATE EXTERNAL TABLE insert_to_ext_derby_table
+(
+ a BOOLEAN,
+ b INTEGER,
+ c BIGINT,
+ d FLOAT,
+ e DOUBLE,
+ f DATE,
+ g VARCHAR(27),
+ h STRING,
+ i CHAR(2),
+ j TIMESTAMP,
+ k DECIMAL(5,4),
+ l TINYINT,
+ m SMALLINT
+ )
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+                "hive.sql.database.type" = "DERBY",
+                "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+                "hive.sql.dbcp.username" = "APP",
+                "hive.sql.dbcp.password" = "mine",
+                "hive.sql.table" = "INSERT_TO_DERBY_TABLE",
+                "hive.sql.dbcp.maxActive" = "1"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@insert_to_ext_derby_table
+POSTHOOK: query: CREATE EXTERNAL TABLE insert_to_ext_derby_table
+(
+ a BOOLEAN,
+ b INTEGER,
+ c BIGINT,
+ d FLOAT,
+ e DOUBLE,
+ f DATE,
+ g VARCHAR(27),
+ h STRING,
+ i CHAR(2),
+ j TIMESTAMP,
+ k DECIMAL(5,4),
+ l TINYINT,
+ m SMALLINT
+ )
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+                "hive.sql.database.type" = "DERBY",
+                "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+                "hive.sql.dbcp.username" = "APP",
+                "hive.sql.dbcp.password" = "mine",
+                "hive.sql.table" = "INSERT_TO_DERBY_TABLE",
+                "hive.sql.dbcp.maxActive" = "1"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@insert_to_ext_derby_table
+PREHOOK: query: CREATE TABLE test_insert_tbl
+(
+ a BOOLEAN,
+ b INTEGER,
+ c BIGINT,
+ d FLOAT,
+ e DOUBLE,
+ f DATE,
+ g VARCHAR(27),
+ h STRING,
+ i CHAR(2),
+ j TIMESTAMP,
+ k DECIMAL(5,4),
+ l TINYINT,
+ m SMALLINT
+ )
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_insert_tbl
+POSTHOOK: query: CREATE TABLE test_insert_tbl
+(
+ a BOOLEAN,
+ b INTEGER,
+ c BIGINT,
+ d FLOAT,
+ e DOUBLE,
+ f DATE,
+ g VARCHAR(27),
+ h STRING,
+ i CHAR(2),
+ j TIMESTAMP,
+ k DECIMAL(5,4),
+ l TINYINT,
+ m SMALLINT
+ )
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_insert_tbl
+PREHOOK: query: INSERT INTO test_insert_tbl VALUES(true, 342, 8900, 9.63, 1099.9999, '2019-04-11', 'abcd', 'efgh', 'k', '2019-05-01 00:00:00', 1.8899, 1, 2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test_insert_tbl
+POSTHOOK: query: INSERT INTO test_insert_tbl VALUES(true, 342, 8900, 9.63, 1099.9999, '2019-04-11', 'abcd', 'efgh', 'k', '2019-05-01 00:00:00', 1.8899, 1, 2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test_insert_tbl
+POSTHOOK: Lineage: test_insert_tbl.a SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.b SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.c SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.d SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.e SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.f SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.g SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.h SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.i SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.j SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.k SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.l SCRIPT []
+POSTHOOK: Lineage: test_insert_tbl.m SCRIPT []
+PREHOOK: query: INSERT INTO insert_to_ext_derby_table VALUES(true, 10, 100, 2.63, 999.9999, '2019-01-11', 'test', 'test1', 'z', '2019-01-01 00:00:00', 1.7899, 1, 2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@insert_to_ext_derby_table
+POSTHOOK: query: INSERT INTO insert_to_ext_derby_table VALUES(true, 10, 100, 2.63, 999.9999, '2019-01-11', 'test', 'test1', 'z', '2019-01-01 00:00:00', 1.7899, 1, 2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@insert_to_ext_derby_table
+PREHOOK: query: INSERT INTO insert_to_ext_derby_table select * from test_insert_tbl
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_insert_tbl
+PREHOOK: Output: default@insert_to_ext_derby_table
+POSTHOOK: query: INSERT INTO insert_to_ext_derby_table select * from test_insert_tbl
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_insert_tbl
+POSTHOOK: Output: default@insert_to_ext_derby_table
+PREHOOK: query: select * from insert_to_ext_derby_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_to_ext_derby_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from insert_to_ext_derby_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_to_ext_derby_table
+#### A masked pattern was here ####
+true	10	100	2.63	999.9999	2019-01-11	test	test1	z 	2019-01-01 00:00:00	1.7899	1	2
+true	342	8900	9.63	1099.9999	2019-04-11	abcd	efgh	k 	2019-05-01 00:00:00	1.8899	1	2
+PREHOOK: query: INSERT INTO insert_to_ext_derby_table VALUES(false, 324, 53465, 2.6453, 599.9999, '2019-04-11', 'fgeg', 'asda', 'k', '2019-03-01 10:00:00', 1.7899, 1, 2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@insert_to_ext_derby_table
+POSTHOOK: query: INSERT INTO insert_to_ext_derby_table VALUES(false, 324, 53465, 2.6453, 599.9999, '2019-04-11', 'fgeg', 'asda', 'k', '2019-03-01 10:00:00', 1.7899, 1, 2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@insert_to_ext_derby_table
+PREHOOK: query: INSERT INTO insert_to_ext_derby_table VALUES(false, 10, 100, 2.63, 999.9999, '2019-11-11', 'test', 'test1', 'a', '2019-01-01 00:00:00', 1.7899, 1, 2),
+                                         (true, 100, 1000, 2.632, 9999.99999, '2019-12-11', 'test_1', 'test1_1', 'b', '2019-02-01 01:00:01', 5.7899, 3, 4),
+                                         (false, 10, 999, 23.632, 99999.99999, '2019-09-11', 'test_2', 'test1_2', 'c', '2019-03-01 11:00:01', 9.7899, 5, 6)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@insert_to_ext_derby_table
+POSTHOOK: query: INSERT INTO insert_to_ext_derby_table VALUES(false, 10, 100, 2.63, 999.9999, '2019-11-11', 'test', 'test1', 'a', '2019-01-01 00:00:00', 1.7899, 1, 2),
+                                         (true, 100, 1000, 2.632, 9999.99999, '2019-12-11', 'test_1', 'test1_1', 'b', '2019-02-01 01:00:01', 5.7899, 3, 4),
+                                         (false, 10, 999, 23.632, 99999.99999, '2019-09-11', 'test_2', 'test1_2', 'c', '2019-03-01 11:00:01', 9.7899, 5, 6)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@insert_to_ext_derby_table
+PREHOOK: query: INSERT INTO insert_to_ext_derby_table select * from test_insert_tbl
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_insert_tbl
+PREHOOK: Output: default@insert_to_ext_derby_table
+POSTHOOK: query: INSERT INTO insert_to_ext_derby_table select * from test_insert_tbl
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_insert_tbl
+POSTHOOK: Output: default@insert_to_ext_derby_table
+PREHOOK: query: select * from insert_to_ext_derby_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_to_ext_derby_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from insert_to_ext_derby_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_to_ext_derby_table
+#### A masked pattern was here ####
+true	10	100	2.63	999.9999	2019-01-11	test	test1	z 	2019-01-01 00:00:00	1.7899	1	2
+true	342	8900	9.63	1099.9999	2019-04-11	abcd	efgh	k 	2019-05-01 00:00:00	1.8899	1	2
+false	324	53465	2.6453	599.9999	2019-04-11	fgeg	asda	k 	2019-03-01 10:00:00	1.7899	1	2
+false	10	100	2.63	999.9999	2019-11-11	test	test1	a 	2019-01-01 00:00:00	1.7899	1	2
+true	100	1000	2.632	9999.99999	2019-12-11	test_1	test1_1	b 	2019-02-01 01:00:01	5.7899	3	4
+false	10	999	23.632	99999.99999	2019-09-11	test_2	test1_2	c 	2019-03-01 11:00:01	9.7899	5	6
+true	342	8900	9.63	1099.9999	2019-04-11	abcd	efgh	k 	2019-05-01 00:00:00	1.8899	1	2