You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by al...@apache.org on 2016/06/29 14:37:02 UTC

[03/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java
new file mode 100644
index 0000000..614c171
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java
@@ -0,0 +1,563 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.service.pig;
+
+import java.nio.charset.Charset;
+import java.security.PrivilegedExceptionAction;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.Scanner;
+import java.io.*;
+import java.net.URISyntaxException;
+import java.net.URL;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
+import org.jdom.Attribute;
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+import org.jdom.input.SAXBuilder;
+import org.jdom.output.Format;
+import org.jdom.output.XMLOutputter;
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+import org.apache.ambari.view.huetoambarimigration.model.*;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+
+public class PigJobImpl {
+
+  static final Logger logger = Logger.getLogger(PigJobImpl.class);
+
+  /** Drains the given reader to exhaustion and returns its contents as one String. */
+  private static String readAll(Reader rd) throws IOException {
+    StringBuilder buffer = new StringBuilder();
+    for (int ch = rd.read(); ch != -1; ch = rd.read()) {
+      buffer.append((char) ch);
+    }
+    return buffer.toString();
+  }
+
+  public void wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException {
+    Date dNow = new Date();
+    SimpleDateFormat ft = new SimpleDateFormat("YYYY-MM-dd hh:mm:ss");
+    String currentDate = ft.format(dNow);
+    XMLOutputter xmlOutput = new XMLOutputter();
+    xmlOutput.setFormat(Format.getPrettyFormat());
+    File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
+    if (xmlfile.exists()) {
+      String iteration = Integer.toString(i + 1);
+      SAXBuilder builder = new SAXBuilder();
+      Document doc;
+      try {
+        doc = (Document) builder.build(xmlfile);
+        Element rootNode = doc.getRootElement();
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate.toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+        rootNode.addContent(record);
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+      } catch (JDOMException e) {
+
+        logger.error("Jdom Exception: " , e);
+      }
+
+
+    } else {
+      // create
+      try {
+        String iteration = Integer.toString(i + 1);
+        Element revertrecord = new Element("RevertChangePage");
+        Document doc = new Document(revertrecord);
+        doc.setRootElement(revertrecord);
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate.toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+        doc.getRootElement().addContent(record);
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+      } catch (IOException io) {
+        logger.error("Jdom Exception: " , io);
+      }
+
+    }
+
+  }
+
+  /**
+   * Returns the current maximum ds_id in the Ambari pig-job table for the
+   * given view-entity id, or 1 when the table is empty.
+   * NOTE(review): the empty-table default is 1 here but 0 in
+   * PigScriptImpl.fetchmaxIdforPigSavedScript — confirm the asymmetry is intended.
+   *
+   * @param driverName JDBC driver name, used to pick the SQL dialect
+   * @param c          open JDBC connection to the Ambari DB
+   * @param id         viewentity id selecting the DS_PIGJOB_&lt;id&gt; table
+   */
+  public int fetchMaxIdforPigJob(String driverName, Connection c, int id) throws SQLException {
+
+    String query;
+    if (driverName.contains("postgresql")) {
+      query = "select MAX(cast(ds_id as integer)) as max from ds_pigjob_" + id + ";";
+    } else if (driverName.contains("mysql")) {
+      query = "select max( cast(ds_id as unsigned) ) as max from DS_PIGJOB_" + id + ";";
+    } else if (driverName.contains("oracle")) {
+      // Oracle rejects a trailing semicolon in JDBC statements.
+      query = "select MAX(cast(ds_id as integer)) as max from ds_pigjob_" + id;
+    } else {
+      // Unknown driver: the original dereferenced a null ResultSet here (NPE).
+      return 1;
+    }
+
+    String ds_id = null;
+    Statement stmt = c.createStatement();
+    try {
+      ResultSet rs = stmt.executeQuery(query);
+      try {
+        while (rs.next()) {
+          ds_id = rs.getString("max");
+        }
+      } finally {
+        rs.close();
+      }
+    } finally {
+      // The original leaked both the Statement and the ResultSet.
+      stmt.close();
+    }
+
+    return (ds_id == null) ? 1 : Integer.parseInt(ds_id);
+  }
+
+  /**
+   * Looks up the viewentity id of the Pig job model for the given view
+   * instance. Returns 0 when no matching row exists.
+   *
+   * @param driverName JDBC driver name, used only for the Oracle semicolon quirk
+   * @param c          open JDBC connection to the Ambari DB
+   * @param instance   view instance name to match
+   */
+  public int fetchInstanceTablename(String driverName, Connection c, String instance) throws SQLException {
+
+    // Parameterized to keep the instance name out of the SQL text (the
+    // original concatenated it — injection risk). Oracle rejects a trailing
+    // semicolon, so the original dialect split is preserved.
+    String sql = "select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.jobs.models.PigJob' and view_instance_name=?";
+    if (!driverName.contains("oracle")) {
+      sql = sql + ";";
+    }
+
+    int id = 0;
+    java.sql.PreparedStatement ps = c.prepareStatement(sql);
+    try {
+      ps.setString(1, instance);
+      ResultSet rs = ps.executeQuery();
+      try {
+        while (rs.next()) {
+          id = rs.getInt("id");
+        }
+      } finally {
+        rs.close();
+      }
+    } finally {
+      ps.close();
+    }
+
+    return id;
+  }
+
+  /**
+   * Inserts one migrated pig-job row into the Ambari DS_PIGJOB_&lt;id&gt; table,
+   * first recording the matching DELETE in the revert file so the migration
+   * can be undone even if the insert fails.
+   *
+   * @param driverName        JDBC driver name, used to pick the SQL dialect
+   * @param dirname           HDFS status directory of the job
+   * @param maxcountforpigjob new ds_id for the inserted row
+   * @param time              unused; kept for caller compatibility
+   * @param time2             unused; kept for caller compatibility
+   * @param epochtime         job start time in epoch milliseconds
+   * @param title             script title shown in the pig view
+   * @param c                 open JDBC connection to the Ambari DB
+   * @param id                viewentity id selecting the target table
+   * @param status            job status string (e.g. "RUNNING")
+   * @param instance          view instance name, recorded in the revert file
+   * @param i                 revert-record index
+   */
+  public void insertRowPigJob(String driverName, String dirname, int maxcountforpigjob, String time, String time2, long epochtime, String title, Connection c, int id, String status, String instance, int i) throws SQLException, IOException {
+
+    String epochtime1 = Long.toString(epochtime);
+    String sql;
+    String revsql;
+
+    // NOTE(review): values (title, dirname, status) are concatenated into the
+    // SQL text; a quote in a Hue script title will break the statement —
+    // confirm inputs are trusted or migrate to PreparedStatement.
+    if (driverName.contains("mysql")) {
+      sql = "INSERT INTO DS_PIGJOB_" + id + " values ('" + maxcountforpigjob + "'," + epochtime1 + ",0,'','f','','','admin',0,'" + dirname + "script.pig','','" + maxcountforpigjob + "','','','" + status + "','" + dirname + "','','" + title + "');";
+      revsql = "delete from  DS_PIGJOB_" + id + " where ds_id='" + maxcountforpigjob + "';";
+    } else if (driverName.contains("postgresql")) {
+      sql = "INSERT INTO ds_pigjob_" + id + " values ('" + maxcountforpigjob + "'," + epochtime1 + ",0,'','f','','','admin',0,'" + dirname + "script.pig','','" + maxcountforpigjob + "','','','" + status + "','" + dirname + "','','" + title + "');";
+      revsql = "delete from  ds_pigjob_" + id + " where ds_id='" + maxcountforpigjob + "';";
+    } else if (driverName.contains("oracle")) {
+      sql = "INSERT INTO ds_pigjob_" + id + " values ('" + maxcountforpigjob + "'," + epochtime1 + ",0,'','f','','','admin',0,'" + dirname + "script.pig','','" + maxcountforpigjob + "','','','" + status + "','" + dirname + "','','" + title + "')";
+      revsql = "delete from  ds_pigjob_" + id + " where ds_id='" + maxcountforpigjob + "'";
+    } else {
+      // Unknown driver: the original executed an empty statement here.
+      logger.error("Unsupported JDBC driver: " + driverName);
+      return;
+    }
+
+    // Record the undo statement before touching the table, as the original did.
+    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+
+    Statement stmt = c.createStatement();
+    try {
+      stmt.executeUpdate(sql);
+    } finally {
+      // The original leaked the Statement.
+      stmt.close();
+    }
+  }
+
+  /**
+   * Returns the current wall-clock time as epoch milliseconds.
+   *
+   * The original decomposed "now" into calendar fields, formatted them, and
+   * re-parsed the string — but used Calendar.HOUR (the 12-hour clock) against
+   * an HH (24-hour) pattern, shifting every afternoon timestamp back by
+   * twelve hours. The round-trip is equivalent to reading the clock directly.
+   *
+   * @throws ParseException never; declared for caller compatibility
+   */
+  public long getEpochTime() throws ParseException {
+    return System.currentTimeMillis();
+  }
+
+  /**
+   * Returns the current time formatted as "yyyy-M-d_h-m" (unpadded fields).
+   * NOTE(review): Calendar.HOUR is the 12-hour clock with no am/pm marker, so
+   * afternoon values collide with morning ones; kept as-is because the string
+   * is used verbatim by callers — confirm before changing to HOUR_OF_DAY.
+   *
+   * @throws ParseException never; declared for caller compatibility
+   */
+  public String getTime() throws ParseException {
+    GregorianCalendar date = new GregorianCalendar();
+    int day = date.get(Calendar.DAY_OF_MONTH);
+    int month = date.get(Calendar.MONTH);
+    int year = date.get(Calendar.YEAR);
+    int minute = date.get(Calendar.MINUTE);
+    int hour = date.get(Calendar.HOUR);
+    // The original also built and re-parsed a second, more precise timestamp
+    // string whose result was never used; that dead code is dropped.
+    return year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute;
+  }
+
+  /**
+   * Formats "now" to resemble the timestamp literals written into the Ambari
+   * pig tables (e.g. "1970-01-17 20:28:55.586000 +00:00:00").
+   * NOTE(review): in the pattern "ss.msssss" the 'm' is the MINUTE field, not
+   * milliseconds, and the trailing "+00:00:00" survives only because ':', '+'
+   * and '0' are not pattern letters — confirm the intended format before
+   * relying on the fractional part.
+   */
+  public String getTimeInorder() throws ParseException {
+    SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.msssss +00:00:00");//dd/MM/yyyy
+    Date now = new Date();
+    String strDate = sdfDate.format(now);
+    return strDate;
+  }
+
+  /**
+   * Reads pig job rows from the Hue database, optionally filtered by username
+   * and/or a creation-date window, and maps each row to a PojoPig.
+   *
+   * @param username   Hue username to filter on, or "all" for every user
+   * @param startdate  inclusive lower bound date string, or "" for no bound
+   * @param endtime    inclusive upper bound date string, or "" for no bound
+   * @param connection open JDBC connection to the Hue DB; always closed in the
+   *                   finally block before returning, even on error
+   * @return list of matching jobs; empty when nothing matches or on SQL error
+   *
+   * NOTE(review): username/startdate/endtime are concatenated into the SQL
+   * text — injection risk if these values come from user input. The 'query'
+   * array and counter 'i' are written but never read.
+   */
+  public ArrayList<PojoPig> fetchFromHueDB(String username, String startdate, String endtime, Connection connection) throws ClassNotFoundException, IOException {
+    int id = 0;
+    int i = 0;
+    String[] query = new String[100];
+    ArrayList<PojoPig> pigjobarraylist = new ArrayList<PojoPig>();
+    try {
+      Statement statement = connection.createStatement();
+      ResultSet rs1 = null;
+      // Resolve the numeric auth_user id for a specific user; "all" skips the
+      // per-user filter entirely.
+      if (username.equals("all")) {
+      } else {
+        ResultSet rs = statement
+          .executeQuery("select id from auth_user where username='"
+            + username + "';");
+        while (rs.next()) {
+
+          id = rs.getInt("id");
+
+        }
+
+      }
+
+      // Four filter cases: no bounds, both bounds, lower bound only, upper
+      // bound only — each crossed with the "all users" / single user split.
+      if (startdate.equals("") && endtime.equals("")) {
+        if (username.equals("all")) {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job;");
+
+        } else {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where user_id =" + id + ";");
+        }
+
+      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where start_time >= date('" + startdate + "') AND start_time <= date('" + endtime + "');");
+        } else {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where user_id =" + id + " AND start_time >= date('" + startdate + "') AND start_time <= date('" + endtime + "');");
+        }
+
+      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
+        if (username.equals("all")) {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where start_time >= date('" + startdate + "');");
+        } else {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where user_id =" + id + " AND start_time >= date('" + startdate + "');");
+        }
+
+      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where start_time <= date('" + endtime + "');");
+        } else {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where user_id =" + id + " AND start_time <= date('" + endtime + "');");
+        }
+
+      }
+
+      // Map each row to a PojoPig, translating Hue's numeric status codes to
+      // the status strings the Ambari pig view expects.
+      while (rs1.next()) {
+        PojoPig pigjjobobject = new PojoPig();
+
+        int runstatus = rs1.getInt("status");
+
+        if (runstatus == 1) {
+          pigjjobobject.setStatus("RUNNING");
+        } else if (runstatus == 2) {
+          pigjjobobject.setStatus("SUCCEEDED");
+        } else if (runstatus == 3) {
+          pigjjobobject.setStatus("SUBMIT_FAILED");
+        } else if (runstatus == 4) {
+          pigjjobobject.setStatus("KILLED");
+        }
+        String title = rs1.getString("script_title");
+
+
+        pigjjobobject.setTitle(title);
+        String dir = rs1.getString("statusdir");
+        pigjjobobject.setDir(dir);
+        Date created_data = rs1.getDate("start_time");
+        pigjjobobject.setDt(created_data);
+
+        pigjobarraylist.add(pigjjobobject);
+
+        i++;
+      }
+
+
+    } catch (SQLException e) {
+      // Errors are logged and an empty/partial list is returned.
+      logger.error("Sqlexception: " , e);
+    } finally {
+      // Closing the connection also releases the statements created above.
+      try {
+        if (connection != null)
+          connection.close();
+      } catch (SQLException e) {
+        logger.error("Sqlexception in closing the connection: " , e);
+
+      }
+    }
+
+    return pigjobarraylist;
+
+  }
+
+  /**
+   * Creates the given directory on the target (non-kerberized) HDFS while
+   * acting as the "hdfs" user. Failures are logged and swallowed, preserving
+   * the best-effort contract of the migration flow.
+   */
+  public void createDirPigJob(final String dir, final String namenodeuri) throws IOException,
+    URISyntaxException {
+
+    try {
+      UserGroupInformation remoteUser = UserGroupInformation.createRemoteUser("hdfs");
+
+      remoteUser.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+          Configuration hdfsConf = new Configuration();
+          hdfsConf.set("fs.hdfs.impl",
+            org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+          );
+          hdfsConf.set("fs.file.impl",
+            org.apache.hadoop.fs.LocalFileSystem.class.getName()
+          );
+          hdfsConf.set("fs.defaultFS", namenodeuri);
+          hdfsConf.set("hadoop.job.ugi", "hdfs");
+
+          FileSystem targetFs = FileSystem.get(hdfsConf);
+          targetFs.mkdirs(new Path(dir));
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception: " , e);
+    }
+  }
+
+  /**
+   * Creates the given directory on a kerberized HDFS while acting as the
+   * "hdfs" user. The Kerberos-enabled configuration must be installed via
+   * UserGroupInformation.setConfiguration before the remote user is created.
+   * Failures are logged and swallowed, matching createDirPigJob.
+   */
+  public void createDirPigJobSecured(final String dir, final String namenodeuri) throws IOException,
+    URISyntaxException {
+
+    try {
+      final Configuration secureConf = new Configuration();
+      secureConf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      secureConf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      secureConf.set("fs.defaultFS", namenodeuri);
+      secureConf.set("hadoop.job.ugi", "hdfs");
+      secureConf.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation.setConfiguration(secureConf);
+      UserGroupInformation remoteUser = UserGroupInformation.createRemoteUser("hdfs");
+
+      remoteUser.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+          FileSystem targetFs = FileSystem.get(secureConf);
+          targetFs.mkdirs(new Path(dir));
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception: " , e);
+    }
+  }
+
+  /**
+   * Copies a single file from the Hue cluster's HDFS to the Ambari cluster's
+   * HDFS as the "hdfs" user, overwriting any existing destination file.
+   * Failures are logged and swallowed (best-effort).
+   *
+   * @param source            absolute path of the source file on the Hue HDFS
+   * @param dest              destination directory (trailing '/' optional)
+   * @param nameNodeuriAmbari namenode URI of the Ambari cluster
+   * @param nameNodeuriHue    namenode URI of the Hue cluster
+   */
+  public void copyFileBetweenHdfs(final String source, final String dest, final String nameNodeuriAmbari, final String nameNodeuriHue)
+    throws IOException {
+
+    try {
+      UserGroupInformation ugi = UserGroupInformation
+        .createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          Configuration confAmbari = new Configuration();
+          confAmbari.set("fs.defaultFS", nameNodeuriAmbari);
+          confAmbari.set("hadoop.job.ugi", "hdfs");
+          FileSystem fileSystemAmbari = FileSystem.get(confAmbari);
+
+          Configuration confHue = new Configuration();
+          // BUG FIX: the source filesystem must point at the Hue namenode;
+          // the original set nameNodeuriAmbari here, so the "copy from Hue"
+          // actually read from the Ambari cluster and nameNodeuriHue was unused.
+          confHue.set("fs.defaultFS", nameNodeuriHue);
+          confHue.set("hadoop.job.ugi", "hdfs");
+          FileSystem fileSystemHue = FileSystem.get(confHue);
+
+          String filename = source.substring(source.lastIndexOf('/') + 1);
+          String dest1 = dest.endsWith("/") ? dest + filename : dest + "/" + filename;
+
+          // Close both streams even when the copy fails midway (the original
+          // leaked them on exception). create() truncates an existing file,
+          // so no exists() check is needed.
+          FSDataInputStream in1 = fileSystemHue.open(new Path(source));
+          try {
+            FSDataOutputStream out = fileSystemAmbari.create(new Path(dest1));
+            try {
+              byte[] b = new byte[1024];
+              int numBytes;
+              while ((numBytes = in1.read(b)) > 0) {
+                out.write(b, 0, numBytes);
+              }
+            } finally {
+              out.close();
+            }
+          } finally {
+            in1.close();
+          }
+          fileSystemAmbari.close();
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception: " , e);
+    }
+
+  }
+
+  /**
+   * Copies a single file from the Hue cluster's HDFS to the Ambari cluster's
+   * HDFS on kerberized clusters, acting as the "hdfs" user and overwriting
+   * any existing destination file. Failures are logged and swallowed.
+   *
+   * @param source            absolute path of the source file on the Hue HDFS
+   * @param dest              destination directory (trailing '/' optional)
+   * @param nameNodeuriAmbari namenode URI of the Ambari cluster
+   * @param nameNodeuriHue    namenode URI of the Hue cluster
+   */
+  public void copyFileBetweenHdfsSecured(final String source, final String dest, final String nameNodeuriAmbari, final String nameNodeuriHue)
+    throws IOException {
+
+    try {
+
+      final Configuration confAmbari = new Configuration();
+      confAmbari.set("fs.defaultFS", nameNodeuriAmbari);
+      confAmbari.set("hadoop.job.ugi", "hdfs");
+
+      final Configuration confHue = new Configuration();
+      // BUG FIX: the source filesystem must point at the Hue namenode; the
+      // original set nameNodeuriAmbari here, leaving nameNodeuriHue unused.
+      confHue.set("fs.defaultFS", nameNodeuriHue);
+      confHue.set("hadoop.job.ugi", "hdfs");
+
+      confAmbari.set("hadoop.security.authentication", "Kerberos");
+      confHue.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation ugi = UserGroupInformation
+        .createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          FileSystem fileSystemAmbari = FileSystem.get(confAmbari);
+          FileSystem fileSystemHue = FileSystem.get(confHue);
+
+          String filename = source.substring(source.lastIndexOf('/') + 1);
+          String dest1 = dest.endsWith("/") ? dest + filename : dest + "/" + filename;
+
+          // Close both streams even when the copy fails midway (the original
+          // leaked them on exception). create() truncates an existing file,
+          // so no exists() check is needed.
+          FSDataInputStream in1 = fileSystemHue.open(new Path(source));
+          try {
+            FSDataOutputStream out = fileSystemAmbari.create(new Path(dest1));
+            try {
+              byte[] b = new byte[1024];
+              int numBytes;
+              while ((numBytes = in1.read(b)) > 0) {
+                out.write(b, 0, numBytes);
+              }
+            } finally {
+              out.close();
+            }
+          } finally {
+            in1.close();
+          }
+          fileSystemAmbari.close();
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception: " , e);
+    }
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigScriptImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigScriptImpl.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigScriptImpl.java
new file mode 100644
index 0000000..e3c668f
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigScriptImpl.java
@@ -0,0 +1,600 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.service.pig;
+
+import java.nio.charset.Charset;
+import java.security.PrivilegedExceptionAction;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.Scanner;
+import java.io.*;
+import java.net.URISyntaxException;
+import java.net.URL;
+
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
+import org.jdom.Attribute;
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+import org.jdom.input.SAXBuilder;
+import org.jdom.output.Format;
+import org.jdom.output.XMLOutputter;
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+import org.apache.ambari.view.huetoambarimigration.model.*;
+
+
+public class PigScriptImpl {
+
+  static final Logger logger = Logger.getLogger(PigJobImpl.class);
+
+  /** Reads the reader until EOF and returns everything read as a single String. */
+  private static String readAll(Reader rd) throws IOException {
+    StringBuilder text = new StringBuilder();
+    for (int next = rd.read(); next != -1; next = rd.read()) {
+      text.append((char) next);
+    }
+    return text.toString();
+  }
+
+  /**
+   * Appends a revert record (the SQL needed to undo one migrated pig script)
+   * to RevertChange.xml in the migration home directory, creating the file on
+   * first use. The method name's spelling ("wrtite") is kept for caller
+   * compatibility.
+   *
+   * @param dirname  HDFS directory of the migrated script, for the revert page
+   * @param content  revert SQL statement to record
+   * @param instance target view instance name
+   * @param i        zero-based record index; stored as id = i + 1
+   */
+  public void wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException {
+
+    // yyyy/HH (not YYYY/hh): YYYY is the week-based year and hh the 12-hour
+    // clock, which produced wrong dates around new year and ambiguous hours.
+    SimpleDateFormat ft = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+    String currentDate = ft.format(new Date());
+
+    XMLOutputter xmlOutput = new XMLOutputter();
+    xmlOutput.setFormat(Format.getPrettyFormat());
+
+    File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
+    String iteration = Integer.toString(i + 1);
+
+    if (xmlfile.exists()) {
+      try {
+        Document doc = new SAXBuilder().build(xmlfile);
+        appendRevertRecord(doc.getRootElement(), iteration, currentDate, dirname, instance, content);
+        writeRevertFile(xmlOutput, doc, xmlfile);
+      } catch (JDOMException e) {
+        logger.error("JDOMException: " , e);
+      }
+    } else {
+      // First use: create the document with a fresh root element.
+      try {
+        Element root = new Element("RevertChangePage");
+        Document doc = new Document(root);
+        appendRevertRecord(root, iteration, currentDate, dirname, instance, content);
+        writeRevertFile(xmlOutput, doc, xmlfile);
+      } catch (IOException io) {
+        // Preserve the original best-effort behavior on first-time creation.
+        logger.error("IOException: " , io);
+      }
+    }
+
+  }
+
+  /** Adds one &lt;RevertRecord&gt; element holding the undo information. */
+  private static void appendRevertRecord(Element parent, String id, String datetime, String dirname, String instance, String query) {
+    Element record = new Element("RevertRecord");
+    record.setAttribute(new Attribute("id", id));
+    record.addContent(new Element("datetime").setText(datetime));
+    record.addContent(new Element("dirname").setText(dirname));
+    record.addContent(new Element("instance").setText(instance));
+    record.addContent(new Element("query").setText(query));
+    parent.addContent(record);
+  }
+
+  /** Writes the document, always closing the writer (the original leaked the FileWriter). */
+  private static void writeRevertFile(XMLOutputter xmlOutput, Document doc, File xmlfile) throws IOException {
+    FileWriter writer = new FileWriter(xmlfile);
+    try {
+      xmlOutput.output(doc, writer);
+    } finally {
+      writer.close();
+    }
+  }
+
+  /**
+   * Looks up the viewentity id of the Pig saved-script model for the given
+   * view instance. Returns 0 when no matching row exists.
+   *
+   * @param driverName JDBC driver name, used only for the Oracle semicolon quirk
+   * @param c          open JDBC connection to the Ambari DB
+   * @param instance   view instance name to match
+   */
+  public int fetchInstanceTablenamePigScript(String driverName, Connection c, String instance) throws SQLException {
+
+    // Parameterized to keep the instance name out of the SQL text (the
+    // original concatenated it — injection risk). Oracle rejects a trailing
+    // semicolon, so the original dialect split is preserved.
+    String sql = "select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.scripts.models.PigScript' and view_instance_name=?";
+    if (!driverName.contains("oracle")) {
+      sql = sql + ";";
+    }
+
+    int id = 0;
+    java.sql.PreparedStatement ps = c.prepareStatement(sql);
+    try {
+      ps.setString(1, instance);
+      ResultSet rs = ps.executeQuery();
+      try {
+        while (rs.next()) {
+          id = rs.getInt("id");
+        }
+      } finally {
+        rs.close();
+      }
+    } finally {
+      ps.close();
+    }
+
+    return id;
+
+  }
+
+  /**
+   * Returns the current maximum ds_id in the Ambari pig saved-script table
+   * for the given view-entity id, or 0 when the table is empty.
+   * NOTE(review): the empty-table default is 0 here but 1 in
+   * PigJobImpl.fetchMaxIdforPigJob — confirm the asymmetry is intended.
+   *
+   * @param driverName JDBC driver name, used to pick the SQL dialect
+   * @param c          open JDBC connection to the Ambari DB
+   * @param id         viewentity id selecting the DS_PIGSCRIPT_&lt;id&gt; table
+   */
+  public int fetchmaxIdforPigSavedScript(String driverName, Connection c, int id) throws SQLException {
+
+    String query;
+    if (driverName.contains("postgresql")) {
+      query = "select MAX(cast(ds_id as integer)) as max from ds_pigscript_" + id + ";";
+    } else if (driverName.contains("mysql")) {
+      query = "select max( cast(ds_id as unsigned) ) as max from DS_PIGSCRIPT_" + id + ";";
+    } else if (driverName.contains("oracle")) {
+      // Oracle rejects a trailing semicolon in JDBC statements.
+      query = "select MAX(cast(ds_id as integer)) as max from ds_pigscript_" + id;
+    } else {
+      // Unknown driver: the original dereferenced a null ResultSet here (NPE).
+      return 0;
+    }
+
+    String ds_id = null;
+    Statement stmt = c.createStatement();
+    try {
+      ResultSet rs = stmt.executeQuery(query);
+      try {
+        while (rs.next()) {
+          ds_id = rs.getString("max");
+        }
+      } finally {
+        rs.close();
+      }
+    } finally {
+      // The original leaked both the Statement and the ResultSet.
+      stmt.close();
+    }
+
+    return (ds_id == null) ? 0 : Integer.parseInt(ds_id);
+  }
+
+  /**
+   * Inserts one migrated saved-script row into the Ambari DS_PIGSCRIPT_&lt;id&gt;
+   * table and then records the matching DELETE in the revert file.
+   * NOTE(review): unlike PigJobImpl.insertRowPigJob, the revert record is
+   * written only AFTER the insert — a failed insert leaves no revert entry;
+   * confirm which ordering is intended.
+   *
+   * @param driverName        JDBC driver name, used to pick the SQL dialect
+   * @param dirname           HDFS directory holding the migrated script
+   * @param maxcountforpigjob unused; kept for caller compatibility
+   * @param maxcount          new ds_id for the inserted row
+   * @param time              unused; kept for caller compatibility
+   * @param time2             unused; kept for caller compatibility
+   * @param epochtime         unused; kept for caller compatibility
+   * @param title             script title shown in the pig view
+   * @param c                 open JDBC connection to the Ambari DB
+   * @param id                viewentity id selecting the target table
+   * @param instance          view instance name, recorded in the revert file
+   * @param i                 revert-record index
+   */
+  public void insertRowForPigScript(String driverName, String dirname, int maxcountforpigjob, int maxcount, String time, String time2, long epochtime, String title, Connection c, int id, String instance, int i) throws SQLException, IOException {
+
+    String maxcount1 = Integer.toString(maxcount);
+    String sql2;
+    String revsql;
+
+    // NOTE(review): dirname/title are concatenated into the SQL text; a quote
+    // in either will break the statement — confirm inputs are trusted or
+    // migrate to PreparedStatement.
+    if (driverName.contains("mysql")) {
+      sql2 = "INSERT INTO DS_PIGSCRIPT_" + id + " values ('" + maxcount1 + "','1970-01-17 20:28:55.586000 +00:00:00',0,'admin','" + dirname + "','','','" + title + "');";
+      revsql = "delete from  DS_PIGSCRIPT_" + id + " where ds_id='" + maxcount1 + "';";
+    } else if (driverName.contains("postgresql")) {
+      sql2 = "INSERT INTO ds_pigscript_" + id + " values ('" + maxcount1 + "','1970-01-17 20:28:55.586000 +00:00:00','f','admin','" + dirname + "','','','" + title + "');";
+      revsql = "delete from  ds_pigscript_" + id + " where ds_id='" + maxcount1 + "';";
+    } else if (driverName.contains("oracle")) {
+      sql2 = "INSERT INTO ds_pigscript_" + id + " values ('" + maxcount1 + "','1970-01-17 20:28:55.586000 +00:00:00','f','admin','" + dirname + "','','','" + title + "')";
+      revsql = "delete from  ds_pigscript_" + id + " where ds_id='" + maxcount1 + "'";
+    } else {
+      // Unknown driver: the original executed an empty statement here.
+      logger.error("Unsupported JDBC driver: " + driverName);
+      return;
+    }
+
+    Statement stmt = c.createStatement();
+    try {
+      stmt.executeUpdate(sql2);
+    } finally {
+      // The original leaked the Statement.
+      stmt.close();
+    }
+
+    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+
+  }
+
+
+  /**
+   * Returns the current wall-clock time as epoch milliseconds.
+   *
+   * The original decomposed "now" into calendar fields, formatted them, and
+   * re-parsed the string — but used Calendar.HOUR (the 12-hour clock) against
+   * an HH (24-hour) pattern, shifting every afternoon timestamp back by
+   * twelve hours. The round-trip is equivalent to reading the clock directly.
+   *
+   * @throws ParseException never; declared for caller compatibility
+   */
+  public long getEpochTime() throws ParseException {
+    return System.currentTimeMillis();
+  }
+
+
+  /**
+   * Returns the current time formatted as "yyyy-M-d_h-m" (unpadded fields).
+   * NOTE(review): Calendar.HOUR is the 12-hour clock with no am/pm marker, so
+   * afternoon values collide with morning ones; kept as-is because the string
+   * is used verbatim by callers — confirm before changing to HOUR_OF_DAY.
+   *
+   * @throws ParseException never; declared for caller compatibility
+   */
+  public String getTime() throws ParseException {
+    GregorianCalendar date = new GregorianCalendar();
+    int day = date.get(Calendar.DAY_OF_MONTH);
+    int month = date.get(Calendar.MONTH);
+    int year = date.get(Calendar.YEAR);
+    int minute = date.get(Calendar.MINUTE);
+    int hour = date.get(Calendar.HOUR);
+    // The original also built and re-parsed a second, more precise timestamp
+    // string whose result was never used; that dead code is dropped.
+    return year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute;
+  }
+
+
+  /**
+   * Formats "now" to resemble the timestamp literals written into the Ambari
+   * pig tables (e.g. "1970-01-17 20:28:55.586000 +00:00:00").
+   * NOTE(review): in the pattern "ss.msssss" the 'm' is the MINUTE field, not
+   * milliseconds, and the trailing "+00:00:00" survives only because ':', '+'
+   * and '0' are not pattern letters — confirm the intended format before
+   * relying on the fractional part.
+   */
+  public String getTimeInorder() throws ParseException {
+    SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.msssss +00:00:00");//dd/MM/yyyy
+    Date now = new Date();
+    String strDate = sdfDate.format(now);
+    return strDate;
+  }
+
+
+  /**
+   * Fetches saved Pig scripts from the Hue database, optionally filtered by
+   * user and/or creation-date window.
+   *
+   * @param username   Hue user name, or "all" for every user
+   * @param startdate  inclusive lower bound on date_created ("" = unbounded)
+   * @param endtime    inclusive upper bound on date_created ("" = unbounded)
+   * @param connection open JDBC connection to the Hue database; it is closed
+   *                   by this method before returning (pre-existing contract,
+   *                   preserved for callers that rely on it)
+   * @param driverName JDBC driver name, used to pick the boolean literal for
+   *                   the "saved" column ("true" for PostgreSQL, "1" otherwise)
+   * @return scripts found; empty list when a SQL error occurs (errors are logged)
+   */
+  public ArrayList<PojoPig> fetchFromHueDatabase(String username, String startdate, String endtime, Connection connection, String driverName) throws ClassNotFoundException, IOException {
+    ArrayList<PojoPig> pigArrayList = new ArrayList<PojoPig>();
+    try {
+      int id = 0;
+      boolean filterByUser = !username.equals("all");
+
+      if (filterByUser) {
+        // Resolve the Hue user id with a bound parameter; the previous
+        // string-concatenated SQL was vulnerable to SQL injection.
+        java.sql.PreparedStatement userStmt =
+          connection.prepareStatement("select id from auth_user where username = ?");
+        try {
+          userStmt.setString(1, username);
+          ResultSet rs = userStmt.executeQuery();
+          while (rs.next()) {
+            id = rs.getInt("id");
+          }
+        } finally {
+          userStmt.close();
+        }
+      }
+
+      // PostgreSQL stores "saved" as a real boolean; the other supported
+      // databases use 0/1 (this literal is fixed text, never user input).
+      String savedLiteral = driverName.contains("postgresql") ? "true" : "1";
+
+      // Build one query covering every filter combination instead of the
+      // previous 16-way branch; all user-supplied values are bound as
+      // parameters rather than concatenated into the SQL text.
+      StringBuilder sql = new StringBuilder(
+        "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=" + savedLiteral);
+      if (filterByUser) {
+        sql.append(" AND user_id = ?");
+      }
+      if (!startdate.equals("")) {
+        sql.append(" AND date_created >= date(?)");
+      }
+      if (!endtime.equals("")) {
+        sql.append(" AND date_created <= date(?)");
+      }
+
+      java.sql.PreparedStatement scriptStmt = connection.prepareStatement(sql.toString());
+      try {
+        int paramIndex = 1;
+        if (filterByUser) {
+          scriptStmt.setInt(paramIndex++, id);
+        }
+        if (!startdate.equals("")) {
+          scriptStmt.setString(paramIndex++, startdate);
+        }
+        if (!endtime.equals("")) {
+          scriptStmt.setString(paramIndex++, endtime);
+        }
+
+        ResultSet rs1 = scriptStmt.executeQuery();
+        while (rs1.next()) {
+          PojoPig pojopig = new PojoPig();
+          pojopig.setDt(rs1.getDate("date_created"));
+          pojopig.setScript(rs1.getString("pig_script"));
+          pojopig.setTitle(rs1.getString("title"));
+          pigArrayList.add(pojopig);
+        }
+      } finally {
+        scriptStmt.close();
+      }
+
+    } catch (SQLException e) {
+      logger.error("SQLException" , e);
+    } finally {
+      try {
+        if (connection != null)
+          connection.close();
+      } catch (SQLException e) {
+        logger.error("SQLException" , e);
+      }
+    }
+
+    return pigArrayList;
+
+  }
+
+  public void writetPigScripttoLocalFile(String script, String title, Date createddate, String homedir, String filename2) {
+    try {
+      logger.info(homedir + filename2);
+      File file = new File(homedir + filename2);
+
+      if (!file.exists()) {
+        file.createNewFile();
+      }
+
+      FileWriter fw = new FileWriter(file.getAbsoluteFile());
+      BufferedWriter bw = new BufferedWriter(fw);
+      bw.write(script);
+      bw.close();
+
+
+    } catch (IOException e) {
+
+      logger.error("IOException" , e);
+    }
+
+  }
+
+  public void deletePigScriptLocalFile(String homedir, String filename2) {
+    try{
+
+      File file = new File(homedir + filename2);
+
+      if(file.delete()){
+        logger.info("Temproray file deleted");
+      }else{
+        logger.info("Temproray file delete failed");
+      }
+
+    }catch(Exception e){
+
+     logger.error("File Exception: ",e);
+
+    }
+
+  }
+
+  /**
+   * Copies a local file into HDFS as the "hdfs" proxy user (non-secure cluster).
+   *
+   * @param source      absolute path of the local source file
+   * @param dest        HDFS destination directory (with or without trailing '/')
+   * @param namenodeuri fs.defaultFS URI of the target cluster
+   * @throws IOException declared for interface compatibility; failures are
+   *                     currently caught and logged rather than propagated
+   */
+  public void putFileinHdfs(final String source, final String dest, final String namenodeuri)
+    throws IOException {
+
+    try {
+      UserGroupInformation ugi = UserGroupInformation
+        .createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          Configuration conf = new Configuration();
+          conf.set("fs.hdfs.impl",
+            org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+          );
+          conf.set("fs.file.impl",
+            org.apache.hadoop.fs.LocalFileSystem.class.getName()
+          );
+          conf.set("fs.defaultFS", namenodeuri);
+          conf.set("hadoop.job.ugi", "hdfs");
+          FileSystem fileSystem = FileSystem.get(conf);
+
+          String filename = source.substring(source.lastIndexOf('/') + 1);
+          String target = dest.endsWith("/") ? dest + filename : dest + "/" + filename;
+
+          // FileSystem.create() overwrites an existing file by default, which
+          // matches the original behavior (its exists() check had an empty body).
+          Path path = new Path(target);
+          FSDataOutputStream out = fileSystem.create(path);
+          InputStream in = null;
+          try {
+            in = new BufferedInputStream(new FileInputStream(new File(source)));
+            byte[] buffer = new byte[1024];
+            int numBytes;
+            while ((numBytes = in.read(buffer)) > 0) {
+              out.write(buffer, 0, numBytes);
+            }
+          } finally {
+            // Release streams and the FileSystem handle even when the copy
+            // fails part-way (the original leaked all three on error).
+            if (in != null) {
+              in.close();
+            }
+            out.close();
+            fileSystem.close();
+          }
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs Exception: " , e);
+    }
+
+  }
+
+  /**
+   * Copies a local file into HDFS on a Kerberos-secured cluster as the
+   * "hdfs" user.
+   *
+   * @param source      absolute path of the local source file
+   * @param dest        HDFS destination directory (with or without trailing '/')
+   * @param namenodeuri fs.defaultFS URI of the target cluster
+   * @throws IOException declared for interface compatibility; failures are
+   *                     currently caught and logged rather than propagated
+   */
+  public void putFileinHdfsSecured(final String source, final String dest, final String namenodeuri)
+    throws IOException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          FileSystem fileSystem = FileSystem.get(conf);
+
+          String filename = source.substring(source.lastIndexOf('/') + 1);
+          String target = dest.endsWith("/") ? dest + filename : dest + "/" + filename;
+
+          // FileSystem.create() overwrites an existing file by default, which
+          // matches the original behavior (its exists() check had an empty body).
+          Path path = new Path(target);
+          FSDataOutputStream out = fileSystem.create(path);
+          InputStream in = null;
+          try {
+            in = new BufferedInputStream(new FileInputStream(new File(source)));
+            byte[] buffer = new byte[1024];
+            int numBytes;
+            while ((numBytes = in.read(buffer)) > 0) {
+              out.write(buffer, 0, numBytes);
+            }
+          } finally {
+            // Release streams and the FileSystem handle even when the copy
+            // fails part-way (the original leaked all three on error).
+            if (in != null) {
+              in.close();
+            }
+            out.close();
+            fileSystem.close();
+          }
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs Exception: " , e);
+
+    }
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/BadRequestFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/BadRequestFormattedException.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/BadRequestFormattedException.java
deleted file mode 100644
index 3edacb2..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/BadRequestFormattedException.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.utils;
-
-public class BadRequestFormattedException extends ServiceFormattedException {
-  private final static int STATUS = 400;
-
-  public BadRequestFormattedException(String message, Throwable exception) {
-    super(message, exception, STATUS);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/FilePaginator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/FilePaginator.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/FilePaginator.java
deleted file mode 100644
index 64a406d..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/FilePaginator.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.utils;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.hadoop.fs.FSDataInputStream;
-
-import java.io.IOException;
-import java.nio.charset.Charset;
-import java.util.Arrays;
-
-import static java.lang.Math.ceil;
-
-/**
- * Pagination for HDFS file implementation
- */
-public class FilePaginator {
-  private static int PAGE_SIZE = 1*1024*1024;  // 1MB
-
-  private String filePath;
-  private ViewContext context;
-
-  /**
-   * Constructor
-   * @param filePath Path to file on HDFS
-   * @param context View Context instance
-   */
-  public FilePaginator(String filePath, ViewContext context) {
-    this.filePath = filePath;
-    this.context = context;
-  }
-
-  /**
-   * Set page size
-   * @param PAGE_SIZE size
-   */
-  public static void setPageSize(int PAGE_SIZE) {
-    FilePaginator.PAGE_SIZE = PAGE_SIZE;
-  }
-
-  /**
-   * Get page count
-   * @return page count
-   * @throws IOException
-   * @throws InterruptedException
-   */
-
-  /**
-   * Read one page of size PAGE_SIZE
-   * @param page page index
-   * @return data in UTF-8
-   * @throws IOException
-   * @throws InterruptedException
-   */
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/MisconfigurationFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/MisconfigurationFormattedException.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/MisconfigurationFormattedException.java
deleted file mode 100644
index dad03ec..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/MisconfigurationFormattedException.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.utils;
-
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.util.HashMap;
-
-public class MisconfigurationFormattedException extends WebApplicationException {
-  private final static int STATUS = 500;
-  private final static String message = "Parameter \"%s\" is set to null";
-  private final static Logger LOG =
-      LoggerFactory.getLogger(MisconfigurationFormattedException.class);
-
-  public MisconfigurationFormattedException(String name) {
-    super(errorEntity(name));
-  }
-
-  protected static Response errorEntity(String name) {
-    HashMap<String, Object> response = new HashMap<String, Object>();
-    response.put("message", String.format(message, name));
-    response.put("trace", null);
-    response.put("status", STATUS);
-    return Response.status(STATUS).entity(new JSONObject(response)).type(MediaType.APPLICATION_JSON).build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/NotFoundFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/NotFoundFormattedException.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/NotFoundFormattedException.java
deleted file mode 100644
index 00ab049..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/NotFoundFormattedException.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.utils;
-
-public class NotFoundFormattedException extends ServiceFormattedException {
-  private final static int STATUS = 404;
-
-  public NotFoundFormattedException(String message, Throwable exception) {
-    super(message, exception, STATUS);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/ServiceFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/ServiceFormattedException.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/ServiceFormattedException.java
deleted file mode 100644
index c49a18c..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/ServiceFormattedException.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.utils;
-
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.security.AccessControlException;
-import java.util.HashMap;
-
-public class ServiceFormattedException extends WebApplicationException {
-  private final static Logger LOG =
-      LoggerFactory.getLogger(ServiceFormattedException.class);
-
-  public ServiceFormattedException(Throwable e) {
-    super(errorEntity(null, e, suggestStatus(e)));
-  }
-
-  public ServiceFormattedException(String message) {
-    super(errorEntity(message, null, suggestStatus(null)));
-  }
-
-  public ServiceFormattedException(String message, Throwable exception) {
-    super(errorEntity(message, exception, suggestStatus(exception)));
-  }
-
-  public ServiceFormattedException(String message, Throwable exception, int status) {
-    super(errorEntity(message, exception, status));
-  }
-
-  private static int suggestStatus(Throwable exception) {
-    int status = 500;
-    if (exception == null) {
-      return status;
-    }
-    if (exception instanceof AccessControlException) {
-      status = 403;
-    }
-    return status;
-  }
-
-  protected static Response errorEntity(String message, Throwable e, int status) {
-    HashMap<String, Object> response = new HashMap<String, Object>();
-
-    String trace = null;
-
-    response.put("message", message);
-    if (e != null) {
-      trace = e.toString() + "\n\n";
-      StringWriter sw = new StringWriter();
-      e.printStackTrace(new PrintWriter(sw));
-      trace += sw.toString();
-
-      if (message == null) {
-        String innerMessage = e.getMessage();
-        String autoMessage;
-
-        if (innerMessage != null) {
-          autoMessage = String.format("%s [%s]", innerMessage, e.getClass().getSimpleName());
-        } else {
-          autoMessage = e.getClass().getSimpleName();
-        }
-        response.put("message", autoMessage);
-      }
-    }
-    response.put("trace", trace);
-    response.put("status", status);
-
-    if(message != null) {
-      LOG.error(message);
-    }
-    if(trace != null) {
-      LOG.error(trace);
-    }
-
-    Response.ResponseBuilder responseBuilder = Response.status(status).entity(new JSONObject(response)).type(MediaType.APPLICATION_JSON);
-    return responseBuilder.build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/WEB-INF/web.xml b/contrib/views/hueambarimigration/src/main/resources/WEB-INF/web.xml
new file mode 100644
index 0000000..8cca06c
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/WEB-INF/web.xml
@@ -0,0 +1,123 @@
+<?xml version="1.0" encoding="ISO-8859-1" ?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+<web-app xmlns="http://java.sun.com/xml/ns/j2ee"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://java.sun.com/xml/ns/j2ee http://java.sun.com/xml/ns/j2ee/web-app_2_4.xsd"
+         version="2.4">
+
+  <display-name>Hue to Ambari Migration</display-name>
+  <welcome-file-list>
+    <welcome-file>index.jsp</welcome-file>
+  </welcome-file-list>
+  <description>
+    This is the Hue to Ambari migration view application.
+  </description>
+
+  <servlet>
+    <servlet-name>HiveHistory</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.hive.HiveHistoryMigration</servlet-class>
+    <load-on-startup>1</load-on-startup>
+  </servlet>
+
+  <servlet>
+    <description>
+    </description>
+    <display-name>
+      SavedQuery
+    </display-name>
+    <servlet-name>SavedQuery</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.hive.HiveSavedQueryMigration</servlet-class>
+  </servlet>
+
+  <servlet>
+    <description>
+    </description>
+    <display-name>PigServlet</display-name>
+    <servlet-name>PigServlet</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.pig.PigScriptMigration</servlet-class>
+  </servlet>
+
+  <servlet>
+    <description>
+    </description>
+    <display-name>Configuration_check</display-name>
+    <servlet-name>Configuration_check</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ConfigurationCheck
+    </servlet-class>
+  </servlet>
+
+  <servlet>
+    <description>
+    </description>
+    <display-name>Pigjobsevlet</display-name>
+    <servlet-name>Pigjobsevlet</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.pig.PigJobMigration</servlet-class>
+  </servlet>
+
+  <servlet-mapping>
+    <servlet-name>HiveHistory</servlet-name>
+    <url-pattern>/HiveHistory</url-pattern>
+  </servlet-mapping>
+
+  <servlet-mapping>
+    <servlet-name>SavedQuery</servlet-name>
+    <url-pattern>/SavedQuery</url-pattern>
+  </servlet-mapping>
+
+  <servlet-mapping>
+    <servlet-name>PigServlet</servlet-name>
+    <url-pattern>/PigServlet</url-pattern>
+  </servlet-mapping>
+
+  <servlet-mapping>
+    <servlet-name>Configuration_check</servlet-name>
+    <url-pattern>/Configuration_check</url-pattern>
+  </servlet-mapping>
+
+  <servlet-mapping>
+    <servlet-name>Pigjobsevlet</servlet-name>
+    <url-pattern>/Pigjobsevlet</url-pattern>
+  </servlet-mapping>
+
+  <servlet>
+    <description></description>
+    <display-name>RevertChange</display-name>
+    <servlet-name>RevertChange</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.revertchange.RevertChange</servlet-class>
+  </servlet>
+
+  <servlet-mapping>
+    <servlet-name>RevertChange</servlet-name>
+    <url-pattern>/RevertChange</url-pattern>
+  </servlet-mapping>
+
+  <servlet>
+    <description></description>
+    <display-name>ProgressBarStatus</display-name>
+    <servlet-name>ProgressBarStatus</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus
+    </servlet-class>
+  </servlet>
+  <servlet-mapping>
+    <servlet-name>ProgressBarStatus</servlet-name>
+    <url-pattern>/ProgressBarStatus</url-pattern>
+  </servlet-mapping>
+
+</web-app>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/index.jsp
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/index.jsp b/contrib/views/hueambarimigration/src/main/resources/index.jsp
new file mode 100644
index 0000000..0ff1f36
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/index.jsp
@@ -0,0 +1,119 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<html>
+<head>
+<title>Hue to Ambari Migration</title>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<!-- Bootstrap CSS and bootstrap datepicker CSS used for styling the demo pages-->
+
+<link rel="stylesheet" href="css/bootstrap.css">
+
+
+
+
+<script src="js/jquery.js"></script>
+<script src="js/bootstrap.min.js"></script>
+
+
+
+
+
+<script type="text/javascript">
+	$(function() {
+		home();
+	});
+	function makeTabActive(tab) {
+		if (!tab) {
+			return;
+		}
+		$(".nav-tab").removeClass('active');
+		$(tab).parents('.nav-tab').addClass('active');
+	}
+	function loadconfiguration(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/checkconfiguration.jsp');
+	}
+	function revertchange(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/revertchange.jsp');
+	}
+	function home(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/homepage.jsp');
+	}
+	function loadhivehistory(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/hivehistoryquerymigration.jsp');
+	}
+	function loadpigscript(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/pigscriptsmigration.jsp');
+	}
+	function loadpigjobs(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/pigjobmigration.jsp');
+	}
+	function loadhivesaved(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/hivesavedquerymigration.jsp');
+	}
+</script>
+
+
+</head>
+
+<div class="container">
+	<!-- <div class="jumbotron" style="margin:10px">
+    <h1>Hue to Ambari Migration</h1>        
+  </div> -->
+
+
+
+<div class="row">
+	<nav class="navbar navbar-default">
+		<div class="container-fluid">
+			<ul class="nav navbar-nav">
+				<li class="nav-tab active"><a onclick="home(this)">Home</a></li>
+				<li class="nav-tab"><a onclick="loadconfiguration(this)">Check
+						configuration</a></li>
+				<li class="dropdown nav-tab"><a class="dropdown-toggle"
+					data-toggle="dropdown" href="#">Hive <span class="caret"></span></a>
+					<ul class="dropdown-menu">
+						<li><span onclick="loadhivesaved(this)">HiveSaved Query</span></li>
+						<li><span onclick="loadhivehistory(this)">HiveHistory</span></li>
+					</ul></li>
+				<li class="dropdown nav-tab"><a class="dropdown-toggle"
+					data-toggle="dropdown" href="#">Pig <span class="caret"></span></a>
+					<ul class="dropdown-menu">
+						<li><span onclick="loadpigscript(this)">Pigsavedscript</span></li>
+						<li><span onclick="loadpigjobs(this)">Pigjobs</span></li>
+					</ul></li>
+				<li class="nav-tab"><a onclick="revertchange(this)">Revert
+						the changes Page</a></li>
+			</ul>
+		</div>
+	</nav>
+</div>
+<div>
+	<div class="col-lg-2 main"></div>
+	<div class="col-lg-8 main">
+		<div id="maincenter11"></div>
+	</div>
+</div>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/.gitignore
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/.gitignore b/contrib/views/hueambarimigration/src/main/resources/ui/.gitignore
new file mode 100644
index 0000000..29aa6db
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/.gitignore
@@ -0,0 +1,33 @@
+# See http://help.github.com/ignore-files/ for more about ignoring files.
+
+# dependencies
+/node_modules
+/bower_components
+node/
+
+# misc
+
+/.idea
+
+# Numerous always-ignore extensions
+*.diff
+*.err
+*.orig
+*.log
+*.rej
+*.swo
+*.swp
+*.vi
+*~
+*.sass-cache
+
+# OS or Editor folders
+.DS_Store
+.cache
+.project
+.settings
+.tmproj
+dist
+nbproject
+Thumbs.db
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/bower.json
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/bower.json b/contrib/views/hueambarimigration/src/main/resources/ui/bower.json
new file mode 100644
index 0000000..ebbf28c
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/bower.json
@@ -0,0 +1,15 @@
+{
+  "name": "huetoambari",
+  "homepage": "https://github.com/apache/ambari",
+  "authors": [
+    "pradarttana"
+  ],
+  "description": "",
+  "main": "",
+  "license": "MIT",
+  "private": true,
+  "dependencies": {
+    "bootstrap": "^3.3.6",
+    "eonasdan-bootstrap-datetimepicker": "^4.17.37"
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/checkconfiguration.jsp
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/checkconfiguration.jsp b/contrib/views/hueambarimigration/src/main/resources/ui/checkconfiguration.jsp
new file mode 100644
index 0000000..b60ff41
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/checkconfiguration.jsp
@@ -0,0 +1,57 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+
+
+<!DOCTYPE html>
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+
+<script type="text/javascript">
+	$(document).ready(function() {
+		// we call the function
+		conf_check();
+	});
+	function conf_check() {
+		var url = "Configuration_check";
+
+		$.ajax({
+			url : url,
+			success : function(result) {
+				console.log("Got Result");
+				document.getElementById("areacenter").innerHTML = result;
+
+			}
+		});
+  }
+</script>	
+
+</head>
+<div class="panel panel-default">
+	<div class="panel-heading">
+		<h3>Checking configuration</h3>
+	</div>
+	<div class="panel-body">
+		<div id="areacenter">
+			<center>
+				<img src="image/updateimg.gif" alt="Smiley face">
+			</center>
+		</div>
+	</div>
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hivehistoryquerymigration.jsp
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hivehistoryquerymigration.jsp b/contrib/views/hueambarimigration/src/main/resources/ui/hivehistoryquerymigration.jsp
new file mode 100644
index 0000000..3de2fdf
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hivehistoryquerymigration.jsp
@@ -0,0 +1,229 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<!DOCTYPE html>
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<script type="text/javascript">
+
+	function validateAndSearch() {
+
+		var uname = document.getElementById("username");
+		uname = uname.options[uname.selectedIndex].value;
+		var startdate = document.getElementById('startdate').value;
+		var enddate = document.getElementById('enddate').value;
+		var instance = document.getElementById("instance");
+		instance = instance.options[instance.selectedIndex].value;
+
+		if (uname == "default") {
+			alert("Please select an username");
+		} else if (instance == "default") {
+			alert("Please select an instance name");
+		} else {
+			$('#progressbar').show();
+			$('#lines').hide();
+
+			historyquery(uname, startdate, enddate, instance);
+			interval = setInterval(loadpercentage, 1000 );
+		}
+	}
+
+	function loadpercentage() {
+		$.ajax({
+      url : "ProgressBarStatus",
+      success : function(result) {
+        $('#progressbarhivesavedquery').css('width', result);
+        console.log("Got the precentage completion "+ result);
+      },
+    });
+
+  }
+
+	function historyquery(uname, startdate, enddate, instance) {
+
+		var url = "HiveHistory?username=" + uname + "&startdate=" + startdate
+        				+ "&enddate=" + enddate + "&instance=" + instance;
+		$.ajax({
+			url : url,
+			success : function(result) {
+				console.log("Got Result");
+				document.getElementById("lines").innerHTML = result;
+				$('#progressbar').hide()
+				$('#lines').show()
+				clearInterval(interval);
+
+			}
+		});
+
+
+
+	}
+</script>
+<%@ page import="java.sql.*"%>
+<%@ page import="org.sqlite.*"%>
+<%@ page import="java.util.ArrayList"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase"%>
+<%@ page import="javax.servlet.ServletConfig"%>
+<%@ page import="javax.servlet.ServletContext"%>
+<%@ page import="org.apache.ambari.view.ViewContext"%>
+</head>
+<div class="row">
+	<%
+		ArrayList<String> username = new ArrayList<String>();
+		ArrayList<String> instancename = new ArrayList<String>();
+		int i;
+
+		Connection conn = null;
+
+		ServletContext context = request.getServletContext();
+        ViewContext view=(ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+        System.out.println(view.getProperties());
+
+		conn = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"),view.getProperties().get("huejdbcurl"),view.getProperties().get("huedbusername"),view.getProperties().get("huedbpassword")).getConnection();
+
+		Statement stat = conn.createStatement();
+
+		ResultSet rs = stat.executeQuery("select * from auth_user;");
+
+		while (rs.next()) {
+			username.add(rs.getString(2));
+		}
+
+		rs.close();
+
+		Connection c = null;
+		Statement stmt = null;
+
+		c = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"),view.getProperties().get("ambarijdbcurl"),view.getProperties().get("ambaridbusername"),view.getProperties().get("ambaridbpassword")).getConnection();
+
+		c.setAutoCommit(false);
+		stmt = c.createStatement();
+
+		ResultSet rs1=null;
+
+		if(view.getProperties().get("ambaridrivername").contains("oracle")){
+      rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}'");
+    } else {
+      rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}';");
+    }
+
+		while (rs1.next()) {
+			instancename.add(rs1.getString(1));
+
+		}
+		rs1.close();
+		stmt.close();
+
+	%>
+	<div class="col-sm-12">
+		<form method="GET" onSubmit="validateAndSearch()">
+			<div class="panel panel-default">
+				<div class="panel-heading">
+					<h3>Hive History Query Migration</h3>
+				</div>
+				<div class="panel-body">
+
+					<div class="row">
+						<div class="col-sm-3">
+							UserName <font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter username1234(*)" name="username1" id="username1"> -->
+							<select class="form-control" name="username"
+								placeholder="User name" id="username" required>
+								<option value="default" selected>Select below</option>
+								<option value="all">ALL User</option>
+
+								<%
+									for (i = 0; i < username.size(); i++) {
+								%><option value="<%=username.get(i)%>"><%=username.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									username.clear();
+								%>
+							</select>
+
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">
+							Instance name <font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter Instance Name(*)" name="instance" id="instance"> -->
+							<select class="form-control" name="instance"
+								placeholder="Instance name" id="instance" required>
+								<option value="default" selected>Select below</option>
+
+								<%
+									for (i = 0; i < instancename.size(); i++) {
+								%><option value="<%=instancename.get(i)%>"><%=instancename.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									instancename.clear();
+								%>
+							</select>
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">Start Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="startdate"
+								id="startdate">
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">End Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="enddate"
+								id="enddate">
+						</div>
+					</div>
+
+					<div class="row">
+
+						<div class="col-sm-3">
+							<input type="button" id="submit" class="btn btn-success"
+								value="submit" onclick="validateAndSearch()">
+						</div>
+					</div>
+
+					<div id="lines" style="display: none;"></div>
+					<br>
+					<br>
+					   <div class="progress" id="progressbar" style="display: none;">
+              <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="70" aria-valuemin="0" aria-valuemax="100"  style="width:0%">
+              </div>
+              </div>
+				</div>
+		</form>
+
+	</div>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hivesavedquerymigration.jsp
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hivesavedquerymigration.jsp b/contrib/views/hueambarimigration/src/main/resources/ui/hivesavedquerymigration.jsp
new file mode 100644
index 0000000..c70751d
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hivesavedquerymigration.jsp
@@ -0,0 +1,240 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<!DOCTYPE html>
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<!-- Bootstrap CSS and bootstrap datepicker CSS used for styling the demo pages-->
+
+
+
+
+ <script type="text/javascript">
+
+	function validateAndSearch() {
+
+		var uname = document.getElementById("username");
+		uname = uname.options[uname.selectedIndex].value;
+		var startdate = document.getElementById('startdate').value;
+		var enddate = document.getElementById('enddate').value;
+		var instance = document.getElementById("instance");
+		instance = instance.options[instance.selectedIndex].value;
+
+		if (uname == "default") {
+			alert("Please select an username");
+		} else if (instance == "default") {
+			alert("Please select an instance name");
+		} else {
+			$('#progressbar').show();
+			$('#lines').hide();
+
+			historyquery(uname, startdate, enddate, instance);
+			interval = setInterval(loadpercentage, 1000 );
+
+		}
+
+	}
+
+	function loadpercentage() {
+      $.ajax({
+         url : "ProgressBarStatus",
+         success : function(result) {
+         $('#progressbarhivesavedquery').css('width', result);
+         console.log("Got the precentage completion "+ result);
+        },
+
+      });
+
+  }
+
+
+
+	function historyquery(uname, startdate, enddate, instance) {
+
+		var url = "SavedQuery?username=" + uname + "&startdate=" + startdate
+        				+ "&enddate=" + enddate + "&instance=" + instance;
+		$.ajax({
+			url : url,
+			success : function(result) {
+				console.log("Got Result");
+				document.getElementById("lines").innerHTML = result;
+				$('#progressbar').hide()
+				$('#lines').show()
+				clearInterval(interval);
+
+			}
+		});
+
+
+
+	}
+</script>
+<%@ page import="java.sql.*"%>
+<%@ page import="org.sqlite.*"%>
+<%@ page import="java.util.ArrayList"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase"%>
+<%@ page import="javax.servlet.ServletConfig"%>
+<%@ page import="javax.servlet.ServletContext"%>
+<%@ page import="org.apache.ambari.view.ViewContext"%>
+</head>
+<div class="row">
+	<%
+		ArrayList<String> username = new ArrayList<String>();
+		ArrayList<String> instancename = new ArrayList<String>();
+		int i;
+
+		Connection conn = null;
+
+		ServletContext context = request.getServletContext();
+        ViewContext view=(ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+        System.out.println(view.getProperties());
+
+		conn = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"),view.getProperties().get("huejdbcurl"),view.getProperties().get("huedbusername"),view.getProperties().get("huedbpassword")).getConnection();
+
+		Statement stat = conn.createStatement();
+
+		ResultSet rs = stat.executeQuery("select * from auth_user;");
+
+		while (rs.next()) {
+			username.add(rs.getString(2));
+		}
+
+		rs.close();
+
+		Connection c = null;
+		Statement stmt = null;
+
+		c = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"),view.getProperties().get("ambarijdbcurl"),view.getProperties().get("ambaridbusername"),view.getProperties().get("ambaridbpassword")).getConnection();
+
+		c.setAutoCommit(false);
+		stmt = c.createStatement();
+
+		ResultSet rs1=null;
+
+		if(view.getProperties().get("ambaridrivername").contains("oracle")){
+		  rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}'");
+		} else {
+		  rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}';");
+		}
+    while (rs1.next()) {
+			instancename.add(rs1.getString(1));
+
+		}
+		rs1.close();
+		stmt.close();
+
+	%>
+	<div class="col-sm-12">
+		<form method="GET" onSubmit="validateAndSearch()">
+			<div class="panel panel-default">
+				<div class="panel-heading">
+					<h3>Hive Saved Query Migration</h3>
+				</div>
+				<div class="panel-body">
+
+					<div class="row">
+						<div class="col-sm-3">
+							UserName <font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter username1234(*)" name="username1" id="username1"> -->
+							<select class="form-control" name="username"
+								placeholder="User name" id="username" required>
+								<option value="default" selected>Select below</option>
+								<option value="all">ALL User</option>
+
+								<%
+									for (i = 0; i < username.size(); i++) {
+								%><option value="<%=username.get(i)%>"><%=username.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									username.clear();
+								%>
+							</select>
+
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">
+							Instance name <font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter Instance Name(*)" name="instance" id="instance"> -->
+							<select class="form-control" name="instance"
+								placeholder="Instance name" id="instance" required>
+								<option value="default" selected>Select below</option>
+
+								<%
+									for (i = 0; i < instancename.size(); i++) {
+								%><option value="<%=instancename.get(i)%>"><%=instancename.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									instancename.clear();
+								%>
+							</select>
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">Start Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="startdate"
+								id="startdate">
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">End Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="enddate"
+								id="enddate">
+						</div>
+					</div>
+
+					<div class="row">
+
+						<div class="col-sm-3">
+							<input type="button" id="submit" class="btn btn-success"
+								value="submit" onclick="validateAndSearch()">
+						</div>
+					</div>
+
+					<div id="lines" style="display: none;"></div>
+
+					<br>
+					<br>
+					   <div class="progress" id="progressbar" style="display: none;">
+              <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="70" aria-valuemin="0" aria-valuemax="100"  style="width:0%">
+          </div>
+        </div>
+
+				</div>
+		</form>
+
+	</div>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/homepage.jsp
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/homepage.jsp b/contrib/views/hueambarimigration/src/main/resources/ui/homepage.jsp
new file mode 100644
index 0000000..69aadac
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/homepage.jsp
@@ -0,0 +1,31 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<%@ page language="java" contentType="text/html; charset=US-ASCII"
+	pageEncoding="US-ASCII"%>
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
+
+</head>
+<body>
+
+
+</body>
+</html>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.bowerrc
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.bowerrc b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.bowerrc
deleted file mode 100644
index 959e169..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.bowerrc
+++ /dev/null
@@ -1,4 +0,0 @@
-{
-  "directory": "bower_components",
-  "analytics": false
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.editorconfig
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.editorconfig b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.editorconfig
deleted file mode 100644
index 47c5438..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.editorconfig
+++ /dev/null
@@ -1,34 +0,0 @@
-# EditorConfig helps developers define and maintain consistent
-# coding styles between different editors and IDEs
-# editorconfig.org
-
-root = true
-
-
-[*]
-end_of_line = lf
-charset = utf-8
-trim_trailing_whitespace = true
-insert_final_newline = true
-indent_style = space
-indent_size = 2
-
-[*.js]
-indent_style = space
-indent_size = 2
-
-[*.hbs]
-insert_final_newline = false
-indent_style = space
-indent_size = 2
-
-[*.css]
-indent_style = space
-indent_size = 2
-
-[*.html]
-indent_style = space
-indent_size = 2
-
-[*.{diff,md}]
-trim_trailing_whitespace = false

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.ember-cli
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.ember-cli b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.ember-cli
deleted file mode 100644
index 427f570..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.ember-cli
+++ /dev/null
@@ -1,10 +0,0 @@
-
-{
-  /**
-    Ember CLI sends analytics information by default. The data is completely
-    anonymous, but there are times when you might want to disable this behavior.
-
-    Setting `disableAnalytics` to true will prevent any data from being sent.
-  */
-  "disableAnalytics": false
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.gitignore
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.gitignore b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.gitignore
deleted file mode 100644
index f7245d9..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.gitignore
+++ /dev/null
@@ -1,44 +0,0 @@
-# See http://help.github.com/ignore-files/ for more about ignoring files.
-
-# compiled output
-/dist
-/tmp
-
-# NPM packages folder.
-
-node_modules/
-bower_components/
-node/
-
-# misc
-/.sass-cache
-/connect.lock
-/coverage/*
-/libpeerconnection.log
-npm-debug.log
-testem.log
-
-/.idea
-
-# Numerous always-ignore extensions
-*.diff
-*.err
-*.orig
-*.log
-*.rej
-*.swo
-*.swp
-*.vi
-*~
-*.sass-cache
-
-# OS or Editor folders
-.DS_Store
-.cache
-.project
-.settings
-.tmproj
-dist
-nbproject
-Thumbs.db
-