Posted to commits@ambari.apache.org by gn...@apache.org on 2017/01/03 08:37:29 UTC
[3/5] ambari git commit: AMBARI-19168 Supports udf migration for hive
and pig migration and multi-user select for all migrations. (Ishan Bhatt via
gauravn7)
http://git-wip-us.apache.org/repos/asf/ambari/blob/bea5ebb8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
index 2c205b0..bab2084 100755
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
@@ -69,6 +69,8 @@ public class HiveHistoryMigrationUtility {
Connection connectionHuedb = null;
Connection connectionAmbaridb = null;
+ logger.info(System.getProperty("java.class.path"));
+
logger.info("--------------------------------------");
logger.info("hive History query Migration started");
logger.info("--------------------------------------");
@@ -83,24 +85,24 @@ public class HiveHistoryMigrationUtility {
String time = null;
Long epochTime = null;
String dirNameforHiveHistroy;
- ArrayList<HiveModel> dbpojoHiveHistoryQuery = new ArrayList<HiveModel>();
+ ArrayList<HiveModel> dbpojoHiveHistoryQuery;
HiveHistoryQueryMigrationImplementation hiveHistoryQueryImpl = new HiveHistoryQueryMigrationImplementation();// creating objects of HiveHistroy implementation
- QuerySet huedatabase = null;
+ QuerySetHueDB huedatabase = null;
/* instantiating the queryset
* according to the driver name
*/
if (view.getProperties().get("huedrivername").contains("mysql")) {
- huedatabase = new MysqlQuerySet();
+ huedatabase = new MysqlQuerySetHueDB();
} else if (view.getProperties().get("huedrivername").contains("postgresql")) {
- huedatabase = new PostgressQuerySet();
+ huedatabase = new PostgressQuerySetHueDB();
} else if (view.getProperties().get("huedrivername").contains("sqlite")) {
- huedatabase = new SqliteQuerySet();
+ huedatabase = new SqliteQuerySetHueDB();
} else if (view.getProperties().get("huedrivername").contains("oracle")) {
- huedatabase = new OracleQuerySet();
+ huedatabase = new OracleQuerySetHueDB();
}
@@ -117,101 +119,115 @@ public class HiveHistoryMigrationUtility {
try {
+ String[] usernames = username.split(",");
+ int totalQueries = 0;
+ for(int k=0; k<usernames.length; k++) {
+ connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();
+ username = usernames[k];
+ migrationresult.setProgressPercentage(0);
+ logger.info("Migration started for user " + username);
+ dbpojoHiveHistoryQuery = hiveHistoryQueryImpl.fetchFromHue(username, startDate, endDate, connectionHuedb, huedatabase);
+ totalQueries += dbpojoHiveHistoryQuery.size();
- connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();
+ for (int j = 0; j < dbpojoHiveHistoryQuery.size(); j++) {
+ logger.info("the query fetched from hue" + dbpojoHiveHistoryQuery.get(j).getQuery());
- dbpojoHiveHistoryQuery = hiveHistoryQueryImpl.fetchFromHue(username, startDate, endDate, connectionHuedb, huedatabase);
+ }
- for (int j = 0; j < dbpojoHiveHistoryQuery.size(); j++) {
- logger.info("the query fetched from hue" + dbpojoHiveHistoryQuery.get(j).getQuery());
+ /* if no migration query was selected from the Hue database according to our search criteria */
- }
+ if (dbpojoHiveHistoryQuery.size() == 0) {
+ logger.info("No queries has been selected for the user " + username + " between dates: " + startDate +" - "+endDate);
- /* if No migration query selected from Hue Database according to our search criteria */
+ } else {
+ /* If hive queries are selected based on our search criteria */
- if (dbpojoHiveHistoryQuery.size() == 0) {
- migrationresult.setIsNoQuerySelected("yes");
- migrationresult.setProgressPercentage(0);
- migrationresult.setNumberOfQueryTransfered(0);
- migrationresult.setTotalNoQuery(dbpojoHiveHistoryQuery.size());
- getResourceManager(view).update(migrationresult, jobid);
- logger.info("No queries has been selected acccording to your criteria");
+ connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
+ connectionAmbaridb.setAutoCommit(false);
- } else {
- /* If hive queries are selected based on our search criteria */
+ // for each query fetched from the Hue database //
- connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
- connectionAmbaridb.setAutoCommit(false);
+ //
+ int id = 0;
- // for each queries fetched from Hue database//
+ id = hiveHistoryQueryImpl.fetchInstanceTablename(connectionAmbaridb, instance, ambaridatabase); // fetching the table name according to the given instance name
+ sequence = hiveHistoryQueryImpl.fetchSequenceno(connectionAmbaridb, id, ambaridatabase);
+ //
+ for (i = 0; i < dbpojoHiveHistoryQuery.size(); i++) {
- //
- int id = 0;
+ float calc = ((float) (i + 1)) / dbpojoHiveHistoryQuery.size() * 100;
+ int progressPercentage = Math.round(calc);
+ migrationresult.setProgressPercentage(progressPercentage);
+ migrationresult.setNumberOfQueryTransfered(i+1);
+ getResourceManager(view).update(migrationresult, jobid);
- id = hiveHistoryQueryImpl.fetchInstanceTablename(connectionAmbaridb, instance, ambaridatabase); // feching table name according to the given instance name
- sequence = hiveHistoryQueryImpl.fetchSequenceno(connectionAmbaridb, id, ambaridatabase);
- //
- for (i = 0; i < dbpojoHiveHistoryQuery.size(); i++) {
+ logger.info("_____________________");
+ logger.info("Loop No." + (i + 1));
+ logger.info("_____________________");
+ logger.info("Hue query that has been fetched" + dbpojoHiveHistoryQuery.get(i).getQuery());
- float calc = ((float) (i + 1)) / dbpojoHiveHistoryQuery.size() * 100;
- int progressPercentage = Math.round(calc);
- migrationresult.setIsNoQuerySelected("no");
- migrationresult.setProgressPercentage(progressPercentage);
- migrationresult.setNumberOfQueryTransfered(i + 1);
- migrationresult.setTotalNoQuery(dbpojoHiveHistoryQuery.size());
- getResourceManager(view).update(migrationresult, jobid);
+ logger.info("Table name has been fetched from intance name");
- logger.info("_____________________");
- logger.info("Loop No." + (i + 1));
- logger.info("_____________________");
- logger.info("Hue query that has been fetched" + dbpojoHiveHistoryQuery.get(i).getQuery());
+ hiveHistoryQueryImpl.writetoFileQueryhql(dbpojoHiveHistoryQuery.get(i).getQuery(), ConfigurationCheckImplementation.getHomeDir());// writing the .hql query to a temp file on local disk
- logger.info("Table name has been fetched from intance name");
+ logger.info(".hql file created in Temp directory");
- hiveHistoryQueryImpl.writetoFileQueryhql(dbpojoHiveHistoryQuery.get(i).getQuery(), ConfigurationCheckImplementation.getHomeDir());// writing to .hql file to a temp file on local disk
+ hiveHistoryQueryImpl.writetoFileLogs(ConfigurationCheckImplementation.getHomeDir());// writing logs to a temp file on local disk
- logger.info(".hql file created in Temp directory");
+ logger.info("Log file created in Temp directory");
- hiveHistoryQueryImpl.writetoFileLogs(ConfigurationCheckImplementation.getHomeDir());// writing to logs file to a temp file on local disk
+ maxCountOfAmbariDb = i + sequence + 1;
- logger.info("Log file created in Temp directory");
+ time = hiveHistoryQueryImpl.getTime();// getting the current system time.
- maxCountOfAmbariDb = i + sequence + 1;
+ epochTime = hiveHistoryQueryImpl.getEpochTime();// getting the system time in epoch format
- time = hiveHistoryQueryImpl.getTime();// getting the system current time.
+ if(usernames[k].equals("all")) {
+ username = dbpojoHiveHistoryQuery.get(i).getOwnerName();
+ }
- epochTime = hiveHistoryQueryImpl.getEpochTime();// getting system time as epoch format
+ dirNameforHiveHistroy = "/user/" + username + "/hive/jobs/hive-job-" + maxCountOfAmbariDb + "-" + time + "/";// creating the directory name
- dirNameforHiveHistroy = "/user/"+username+"/hive/jobs/hive-job-" + maxCountOfAmbariDb + "-" + time + "/";// creating the directory name
+ logger.info("Directory name where .hql will be saved: " + dirNameforHiveHistroy);
- logger.info("Directory name where .hql will be saved: " + dirNameforHiveHistroy);
+ String versionName = hiveHistoryQueryImpl.getAllHiveVersionInstance(connectionAmbaridb, ambaridatabase, instance);
- String versionName = hiveHistoryQueryImpl.getAllHiveVersionInstance(connectionAmbaridb, ambaridatabase, instance);
+ hiveHistoryQueryImpl.insertRowinAmbaridb(dirNameforHiveHistroy, maxCountOfAmbariDb, epochTime, connectionAmbaridb, id, instance, i, ambaridatabase, versionName, username);// inserting in ambari database
- hiveHistoryQueryImpl.insertRowinAmbaridb(dirNameforHiveHistroy, maxCountOfAmbariDb, epochTime, connectionAmbaridb, id, instance, i, ambaridatabase, versionName, username);// inserting in ambari database
+ if (view.getProperties().get("KerberoseEnabled").equals("y")) {
- if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+ logger.info("kerberose enabled");
+ hiveHistoryQueryImpl.createDirKerberorisedSecured(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"), username, view.getProperties().get("PrincipalUserName"));// creating directory in kerborized secured hdfs
+ logger.info("Directory created in hdfs");
+ hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"), username, view.getProperties().get("PrincipalUserName"));// copying the .hql file to kerborized hdfs
+ hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"), username, view.getProperties().get("PrincipalUserName"));// copying the log file to kerborized hdfs
+ } else {
- logger.info("kerberose enabled");
- hiveHistoryQueryImpl.createDirKerberorisedSecured(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"),username,view.getProperties().get("PrincipalUserName"));// creating directory in kerborized secured hdfs
- logger.info("Directory created in hdfs");
- hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"),username,view.getProperties().get("PrincipalUserName"));// copying the .hql file to kerborized hdfs
- hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"),username,view.getProperties().get("PrincipalUserName"));// copying the log file to kerborized hdfs
- } else {
+ logger.info("kerberose not enabled");
+ hiveHistoryQueryImpl.createDir(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"), username);// creating directory in hdfs
+ logger.info("Directory created in hdfs");
+ hiveHistoryQueryImpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"), username);// copying the .hql file to hdfs
+ hiveHistoryQueryImpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"), username);// copying the log file to hdfs
+ }
- logger.info("kerberose not enabled");
- hiveHistoryQueryImpl.createDir(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"),username);// creating directory in hdfs
- logger.info("Directory created in hdfs");
- hiveHistoryQueryImpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"),username);// copying the .hql file to hdfs
- hiveHistoryQueryImpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"),username);// copying the log file to hdfs
}
+ hiveHistoryQueryImpl.updateSequenceno(connectionAmbaridb, maxCountOfAmbariDb, id, ambaridatabase);
+ connectionAmbaridb.commit();
}
- hiveHistoryQueryImpl.updateSequenceno(connectionAmbaridb, maxCountOfAmbariDb, id, ambaridatabase);
- connectionAmbaridb.commit();
-
-
+ logger.info("Migration completed for user " + username);
+ }
+ logger.info("Migration Completed");
+ migrationresult.setFlag(1);
+ if(totalQueries==0) {
+ migrationresult.setNumberOfQueryTransfered(0);
+ migrationresult.setTotalNoQuery(0);
+ } else {
+ migrationresult.setNumberOfQueryTransfered(totalQueries);
+ migrationresult.setTotalNoQuery(totalQueries);
+ migrationresult.setProgressPercentage(100);
}
+ getResourceManager(view).update(migrationresult, jobid);
} catch (SQLException e) {
logger.error("Sql exception in ambari database: ", e);
try {
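The hunk above is the heart of the multi-user change: the username parameter may now carry a comma-separated list, the progress counters are reset per user, and when the literal "all" is passed the effective user becomes each fetched query's owner. A minimal standalone sketch of that selection logic, with invented usernames and a stubbed fetch standing in for fetchFromHue:

    import java.util.ArrayList;
    import java.util.List;

    // Sketch only: mirrors the split/iterate/reset pattern above with a
    // stubbed query fetch; class and sample names are invented.
    public class MultiUserSelectSketch {

      public static void main(String[] args) {
        String usernameParam = "alice,bob";        // or "all"
        int totalQueries = 0;
        for (String requested : usernameParam.split(",")) {
          List<String> owners = fetchOwnersFromHue(requested); // stub for fetchFromHue
          totalQueries += owners.size();
          for (String owner : owners) {
            // with "all", the effective user is the owner recorded on the query
            String effectiveUser = requested.equals("all") ? owner : requested;
            System.out.println("Migrating a query for " + effectiveUser);
          }
        }
        System.out.println("Total queries migrated: " + totalQueries);
      }

      private static List<String> fetchOwnersFromHue(String requested) {
        ArrayList<String> owners = new ArrayList<String>();
        owners.add(requested.equals("all") ? "charlie" : requested);
        return owners;
      }
    }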
http://git-wip-us.apache.org/repos/asf/ambari/blob/bea5ebb8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
index 55c81c0..e827b09 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
@@ -19,7 +19,7 @@
package org.apache.ambari.view.huetoambarimigration.migration.hive.historyquery;
import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset.QuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset.QuerySet;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset.QuerySetHueDB;
import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel;
import org.apache.hadoop.conf.Configuration;
@@ -45,6 +45,7 @@ import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.Date;
+import java.net.URI;
public class HiveHistoryQueryMigrationImplementation {
@@ -258,7 +259,7 @@ public class HiveHistoryQueryMigrationImplementation {
}
- public ArrayList<HiveModel> fetchFromHue(String username, String startdate, String endtime, Connection connection, QuerySet huedatabase) throws ClassNotFoundException, SQLException {
+ public ArrayList<HiveModel> fetchFromHue(String username, String startdate, String endtime, Connection connection, QuerySetHueDB huedatabase) throws ClassNotFoundException, SQLException {
int id = 0;
int i = 0;
ArrayList<HiveModel> hiveArrayList = new ArrayList<HiveModel>();
@@ -268,11 +269,13 @@ public class HiveHistoryQueryMigrationImplementation {
connection.setAutoCommit(false);
PreparedStatement prSt = null;
Statement statement = connection.createStatement();
- String query;
+ String query, ownerName = "";
ResultSet rs;
+ int ownerId;
ResultSet rs1 = null;
if (username.equals("all")) {
+
} else {
@@ -320,7 +323,16 @@ public class HiveHistoryQueryMigrationImplementation {
while (rs1.next()) {
HiveModel hivepojo = new HiveModel();
+ ownerId = rs1.getInt("owner_id");
+ if(username.equals("all")) {
+ prSt = huedatabase.getUserName(connection, ownerId);
+ ResultSet resultSet = prSt.executeQuery();
+ while(resultSet.next()) {
+ ownerName = resultSet.getString("username");
+ }
+ }
query = rs1.getString("query");
+ hivepojo.setOwnerName(ownerName);
hivepojo.setQuery(query);
hiveArrayList.add(hivepojo);
i++;
@@ -436,11 +448,11 @@ public class HiveHistoryQueryMigrationImplementation {
public Boolean run() throws Exception {
- FileSystem fs = FileSystem.get(conf);
+ URI uri = new URI(dir);
+ FileSystem fs = FileSystem.get(uri, conf, username);
Path src = new Path(dir);
Boolean b = fs.mkdirs(src);
- fs.setOwner(src,username,"hadoop");
return b;
}
});
@@ -470,10 +482,10 @@ public class HiveHistoryQueryMigrationImplementation {
ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
public Boolean run() throws Exception {
- FileSystem fs = FileSystem.get(conf);
+ URI uri = new URI(dir);
+ FileSystem fs = FileSystem.get(uri, conf, username);
Path src = new Path(dir);
Boolean b = fs.mkdirs(src);
- fs.setOwner(src,username,"hadoop");
return b;
}
});
@@ -514,9 +526,7 @@ public class HiveHistoryQueryMigrationImplementation {
}
Path path = new Path(dest1);
- if (fileSystem.exists(path)) {
- }
// Path pathsource = new Path(source);
FSDataOutputStream out = fileSystem.create(path);
@@ -530,7 +540,7 @@ public class HiveHistoryQueryMigrationImplementation {
}
in.close();
out.close();
- fileSystem.setOwner(path,username,"hadoop");
+ fileSystem.setOwner(path, username, "hadoop");
fileSystem.close();
return null;
}
@@ -576,9 +586,7 @@ public class HiveHistoryQueryMigrationImplementation {
}
Path path = new Path(dest1);
- if (fileSystem.exists(path)) {
- }
FSDataOutputStream out = fileSystem.create(path);
@@ -592,7 +600,7 @@ public class HiveHistoryQueryMigrationImplementation {
}
in.close();
out.close();
- fileSystem.setOwner(path,username,"hadoop");
+ fileSystem.setOwner(path, username, "hadoop");
fileSystem.close();
return null;
}
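Two recurring changes in this file are worth noting: FileSystem.get(conf) followed by fs.setOwner(...) is replaced by FileSystem.get(uri, conf, username), which opens the filesystem as the target user in the first place, and the empty "if (fileSystem.exists(path)) { }" blocks are dropped. A minimal sketch of the new directory-creation pattern, with a placeholder namenode URI and username:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Sketch only: creates an HDFS directory as the target user, so no
    // explicit setOwner() call is needed afterwards. URI and user are placeholders.
    public class CreateDirAsUserSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://namenode.example.com:8020"); // placeholder
        String username = "alice";                                    // placeholder
        String dir = "/user/" + username + "/hive/jobs/hive-job-1/";
        // get(URI, Configuration, String user) connects as `username`,
        // so mkdirs() produces a directory already owned by that user.
        FileSystem fs = FileSystem.get(new URI(dir), conf, username);
        boolean created = fs.mkdirs(new Path(dir));
        System.out.println("created: " + created);
        fs.close();
      }
    }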
http://git-wip-us.apache.org/repos/asf/ambari/blob/bea5ebb8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryStartJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryStartJob.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryStartJob.java
index 5228bf6..77ee359 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryStartJob.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryStartJob.java
@@ -52,6 +52,7 @@ public class HiveHistoryStartJob extends Thread{
migrationresult.setIntanceName(instance);
migrationresult.setUserNameofhue(username);
migrationresult.setProgressPercentage(0);
+ migrationresult.setFlag(0);
JSONObject response = new JSONObject();
http://git-wip-us.apache.org/repos/asf/ambari/blob/bea5ebb8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
index 4d9a6c3..c7ea7aa 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
@@ -20,7 +20,7 @@ package org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery;
import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel;
import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.QuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset.QuerySet;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset.QuerySetHueDb;
import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -36,6 +36,7 @@ import org.jdom.input.SAXBuilder;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import org.json.JSONObject;
+import org.json.JSONArray;
import java.io.*;
import java.net.URISyntaxException;
@@ -44,10 +45,9 @@ import java.security.PrivilegedExceptionAction;
import java.sql.*;
import java.text.ParseException;
import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Calendar;
+import java.util.*;
import java.util.Date;
-import java.util.GregorianCalendar;
+import java.net.URI;
public class HiveSavedQueryMigrationImplementation {
@@ -151,7 +151,7 @@ public class HiveSavedQueryMigrationImplementation {
}
- public int fetchSequenceno(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {
+ public int fetchSequenceno(Connection c, QuerySetAmbariDB ambaridatabase, String sequenceName) throws SQLException {
String ds_id = new String();
Statement stmt = null;
@@ -159,10 +159,10 @@ public class HiveSavedQueryMigrationImplementation {
int sequencevalue=0;
- ResultSet rs = null;
+ ResultSet rs = null;
- prSt = ambaridatabase.getSequenceNoFromAmbariSequence(c, id);
+ prSt = ambaridatabase.getSequenceNoFromAmbariSequence(c, sequenceName);
logger.info("sql statement to fetch is from ambari instance:= = " + prSt);
@@ -174,16 +174,16 @@ public class HiveSavedQueryMigrationImplementation {
return sequencevalue;
}
- public void updateSequenceno(Connection c, int seqNo, int id, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
+ public void updateSequenceno(Connection c, int seqNo, String sequenceName, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
PreparedStatement prSt;
- prSt = ambaridatabase.updateSequenceNoInAmbariSequence(c, seqNo, id);
+ prSt = ambaridatabase.updateSequenceNoInAmbariSequence(c, seqNo, sequenceName);
logger.info("The actual insert statement is " + prSt);
prSt.executeUpdate();
logger.info("adding revert sql hive history");
}
- public int fetchInstancetablenameForSavedqueryHive(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
+ public int fetchInstancetablename(Connection c, String instance, QuerySetAmbariDB ambaridatabase, String tableSequence) throws SQLException {
String ds_id = new String();
int id = 0;
@@ -194,9 +194,9 @@ public class HiveSavedQueryMigrationImplementation {
ResultSet rs = null;
- prSt = ambaridatabase.getTableIdFromInstanceNameSavedquery(c, instance);
+ prSt = ambaridatabase.getTableIdFromInstanceName(c, instance, tableSequence);
- logger.info("sql statement to fetch is from ambari instance:= = " + prSt);
+ logger.info("sql statement to fetch from ambari instance:= = " + prSt);
rs = prSt.executeQuery();
@@ -301,6 +301,20 @@ public class HiveSavedQueryMigrationImplementation {
}
+
+ public void insertUdf(Connection c, int fileid, int udfid, int maxcountFileResource, int maxcountUdf, String udfClass, String fileName, String udfName, String udfOwner, String udfPath, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
+
+ String revsql = null;
+
+ PreparedStatement prSt = null;
+
+ prSt = ambaridatabase.insertToFileResources(c, fileid, Integer.toString(maxcountFileResource), fileName, udfOwner, udfPath);
+ prSt.executeUpdate();
+ prSt = ambaridatabase.insertToHiveUdf(c, udfid, Integer.toString(maxcountUdf), Integer.toString(maxcountFileResource), udfClass, udfName, udfOwner);
+ prSt.executeUpdate();
+
+ }
+
public long getEpochTime() throws ParseException {
long seconds = System.currentTimeMillis() / 1000l;
@@ -335,7 +349,7 @@ public class HiveSavedQueryMigrationImplementation {
}
- public ArrayList<HiveModel> fetchFromHuedb(String username, String startdate, String endtime, Connection connection, QuerySet huedatabase)
+ public ArrayList<HiveModel> fetchFromHuedb(String username, String startdate, String endtime, Connection connection, QuerySetHueDb huedatabase)
throws ClassNotFoundException, IOException {
int id = 0;
int i = 0;
@@ -348,6 +362,8 @@ public class HiveSavedQueryMigrationImplementation {
connection.setAutoCommit(false);
PreparedStatement prSt = null;
ResultSet rs;
+ String ownerName="";
+ int ownerId;
if (username.equals("all")) {
} else {
@@ -392,28 +408,68 @@ public class HiveSavedQueryMigrationImplementation {
}
+ logger.info("Query Prepared statement is " + prSt.toString());
+
rs1 = prSt.executeQuery();
+ logger.info("Query executed");
+
while (rs1.next()) {
HiveModel hivepojo = new HiveModel();
- String name = rs1.getString("name");
+ ownerId = rs1.getInt("owner_id");
+ if(username.equals("all")) {
+ prSt = huedatabase.getUserName(connection, ownerId);
+ ResultSet resultSet = prSt.executeQuery();
+ while(resultSet.next()) {
+ ownerName = resultSet.getString("username");
+ }
+ }
+ String queryTitle = rs1.getString("name");
String temp = rs1.getString("data");
InputStream is = new ByteArrayInputStream(temp.getBytes());
BufferedReader rd = new BufferedReader(new InputStreamReader(
is, Charset.forName("UTF-8")));
String jsonText = readAll(rd);
+
JSONObject json = new JSONObject(jsonText);
String resources = json.get("query").toString();
- json = new JSONObject(resources);
+ logger.info("query: "+resources);
+ JSONArray fileResources = (JSONArray) json.get("file_resources");
+ JSONArray functions = (JSONArray) json.get("functions");
+ ArrayList<String> filePaths = new ArrayList<String>();
+ ArrayList<String> classNames = new ArrayList<String>();
+ ArrayList<String> udfNames = new ArrayList<String>();
+
+ for(int j=0;j<fileResources.length();j++) {
+ filePaths.add(fileResources.getJSONObject(j).get("path").toString());
+ }
+ for(int j=0;j<functions.length();j++) {
+ classNames.add(functions.getJSONObject(j).get("class_name").toString());
+ udfNames.add(functions.getJSONObject(j).get("name").toString());
+ }
+
+ logger.info("Paths are: " + Arrays.toString(filePaths.toArray()));
+ logger.info("Class names are: " + Arrays.toString(classNames.toArray()));
+ logger.info("Udf names are: " + Arrays.toString(udfNames.toArray()));
+
+
+ json = new JSONObject(resources);
String resarr = (json.get("query")).toString();
json = new JSONObject(resources);
String database = (json.get("database")).toString();
+
hivepojo.setQuery(resarr);
+ hivepojo.setOwnerName(ownerName);
hivepojo.setDatabase(database);
- hivepojo.setOwner(name);
+ hivepojo.setQueryTitle(queryTitle);
+ if(filePaths.size() > 0) {
+ hivepojo.setFilePaths(filePaths);
+ hivepojo.setUdfClasses(classNames);
+ hivepojo.setUdfNames(udfNames);
+ }
hiveArrayList.add(hivepojo);
i++;
}
@@ -435,6 +491,21 @@ public class HiveSavedQueryMigrationImplementation {
}
+ public boolean checkUdfExists(Connection connection, String fileName, String username, int tableId, QuerySetAmbariDB ambaridatabase, HashSet<String> udfSet) throws SQLException{
+ //check if it is already in the database
+ ResultSet rs = ambaridatabase.getUdfFileNamesAndOwners(connection, tableId).executeQuery();
+ while(rs.next()){
+ logger.info("fileName: "+fileName+" ds_name:"+rs.getString("ds_name")+" username:"+username+" ds_owner:"+rs.getString("ds_owner"));
+ if(rs.getString("ds_name").equals(fileName) && rs.getString("ds_owner").equals(username)) {
+ return true;
+ }
+ }
+ // check if it is one of the UDFs selected in this migration
+ if(udfSet.contains(fileName+username)) {
+ return true;
+ }
+ return false;
+ }
public void writetoFilequeryHql(String content, String homedir) {
try {
@@ -536,10 +607,10 @@ public class HiveSavedQueryMigrationImplementation {
public Void run() throws Exception {
- FileSystem fs = FileSystem.get(conf);
+ URI uri = new URI(dir);
+ FileSystem fs = FileSystem.get(uri, conf, username);
Path src = new Path(dir);
fs.mkdirs(src);
- fs.setOwner(src,username,"hadoop");
return null;
}
});
@@ -568,10 +639,10 @@ public class HiveSavedQueryMigrationImplementation {
ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
public Boolean run() throws Exception {
- FileSystem fs = FileSystem.get(conf);
+ URI uri = new URI(dir);
+ FileSystem fs = FileSystem.get(uri, conf, username);
Path src = new Path(dir);
Boolean b = fs.mkdirs(src);
- fs.setOwner(src,username,"hadoop");
return b;
}
});
@@ -613,9 +684,7 @@ public class HiveSavedQueryMigrationImplementation {
}
Path path = new Path(dest1);
- if (fileSystem.exists(path)) {
- }
FSDataOutputStream out = fileSystem.create(path);
InputStream in = new BufferedInputStream(
@@ -628,7 +697,7 @@ public class HiveSavedQueryMigrationImplementation {
}
in.close();
out.close();
- fileSystem.setOwner(path,username,"hadoop");
+ fileSystem.setOwner(path, username, "hadoop");
fileSystem.close();
return null;
}
@@ -690,7 +759,7 @@ public class HiveSavedQueryMigrationImplementation {
}
in.close();
out.close();
- fileSystem.setOwner(path,username,"hadoop");
+ fileSystem.setOwner(path, username, "hadoop");
fileSystem.close();
return null;
}
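The fetchFromHuedb changes above unpack more of Hue's saved-query "data" blob: besides the statement and database, the top-level "file_resources" and "functions" arrays now yield the jar paths, UDF class names, and UDF names to migrate alongside the query. A small sketch of that parsing with an invented blob (the field layout follows the reads in the patch; the sample values are illustrative):

    import java.util.ArrayList;
    import org.json.JSONArray;
    import org.json.JSONObject;

    // Sketch only: parses a Hue saved-query "data" blob the way the patch
    // does -- "query" nests the statement and database, while
    // "file_resources" and "functions" describe the UDFs. Blob is invented.
    public class HueSavedQueryJsonSketch {
      public static void main(String[] args) {
        String jsonText = "{\"query\": {\"query\": \"SELECT trim2(name) FROM t\", \"database\": \"default\"},"
            + " \"file_resources\": [{\"path\": \"/udfs/trim2.jar\"}],"
            + " \"functions\": [{\"class_name\": \"com.example.Trim2\", \"name\": \"trim2\"}]}";
        JSONObject json = new JSONObject(jsonText);
        String query = json.getJSONObject("query").getString("query");
        String database = json.getJSONObject("query").getString("database");
        JSONArray fileResources = json.getJSONArray("file_resources");
        JSONArray functions = json.getJSONArray("functions");
        ArrayList<String> filePaths = new ArrayList<String>();
        for (int j = 0; j < fileResources.length(); j++) {
          filePaths.add(fileResources.getJSONObject(j).getString("path"));
        }
        for (int j = 0; j < functions.length(); j++) {
          System.out.println(functions.getJSONObject(j).getString("name")
              + " -> " + functions.getJSONObject(j).getString("class_name"));
        }
        System.out.println(query + " on " + database + " with jars " + filePaths);
      }
    }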
http://git-wip-us.apache.org/repos/asf/ambari/blob/bea5ebb8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
index 566b369..5fc171c 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
@@ -43,11 +43,19 @@ import java.sql.Connection;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.ArrayList;
+import java.util.HashSet;
public class HiveSavedQueryMigrationUtility {
protected MigrationResourceManager resourceManager = null;
+ private static final String SAVEDQUERYTABLE = "ds_savedquery";
+ private static final String FILETABLE = "ds_fileresourceitem";
+ private static final String UDFTABLE = "ds_udf";
+ private static final String SEQ = "id_seq";
+ private static final String SAVEDQUERYSEQUENCE = "org.apache.ambari.view.%hive%.resources.savedQueries.SavedQuery";
+ private static final String FILERESOURCESEQUENCE = "org.apache.ambari.view.%hive%.resources.resources.FileResourceItem";
+ private static final String UDFSEQUENCE = "org.apache.ambari.view.%hive%.resources.udfs.UDF";
public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
if (resourceManager == null) {
@@ -65,7 +73,8 @@ public class HiveSavedQueryMigrationUtility {
Connection connectionAmbaridb = null;
Connection connectionHuedb = null;
- int i = 0;
+ int i = 0, j=0;
+ String sequenceName = "";
logger.info("-------------------------------------");
logger.info("hive saved query Migration started");
@@ -77,16 +86,20 @@ public class HiveSavedQueryMigrationUtility {
HiveSavedQueryMigrationImplementation hivesavedqueryimpl = new HiveSavedQueryMigrationImplementation();/* creating Implementation object */
- QuerySet huedatabase = null;
+ QuerySetHueDb huedatabase = null;
if (view.getProperties().get("huedrivername").contains("mysql")) {
- huedatabase = new MysqlQuerySet();
+ huedatabase = new MysqlQuerySetHueDb();
+ logger.info("Hue database is MySQL");
} else if (view.getProperties().get("huedrivername").contains("postgresql")) {
- huedatabase = new PostgressQuerySet();
+ huedatabase = new PostgressQuerySetHueDb();
+ logger.info("Hue database is Postgres");
} else if (view.getProperties().get("huedrivername").contains("sqlite")) {
- huedatabase = new SqliteQuerySet();
+ huedatabase = new SqliteQuerySetHueDb();
+ logger.info("Hue database is SQLite");
} else if (view.getProperties().get("huedrivername").contains("oracle")) {
- huedatabase = new OracleQuerySet();
+ huedatabase = new OracleQuerySetHueDb();
+ logger.info("Hue database is Oracle");
}
@@ -95,112 +108,158 @@ public class HiveSavedQueryMigrationUtility {
if (view.getProperties().get("ambaridrivername").contains("mysql")) {
ambaridatabase = new MysqlQuerySetAmbariDB();
+ logger.info("Ambari database is MySQL");
} else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
ambaridatabase = new PostgressQuerySetAmbariDB();
+ logger.info("Ambari database is PostGres");
} else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
ambaridatabase = new OracleQuerySetAmbariDB();
+ logger.info("Ambari database is Oracle");
}
- int maxcountForHivehistroryAmbaridb, maxCountforSavequeryAmbaridb = 0;
+ int maxCountforFileResourceAmbaridb=0, maxCountforUdfAmbaridb=0, maxCountforSavequeryAmbaridb = 0;
String time = null;
Long epochtime = null;
String dirNameforHiveSavedquery;
ArrayList<HiveModel> dbpojoHiveSavedQuery = new ArrayList<HiveModel>();
+ HashSet<String> udfSet = new HashSet<>();
+
try {
+ String[] usernames = username.split(",");
+ int totalQueries = 0;
+ for(int l=0; l<usernames.length; l++) {
+ connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection(); /* fetching connection to hue DB */
+ logger.info("Hue database connection successful");
- connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection(); /* fetching connection to hue DB */
+ username = usernames[l];
+ migrationresult.setProgressPercentage(0);
+ dbpojoHiveSavedQuery = hivesavedqueryimpl.fetchFromHuedb(username, startDate, endDate, connectionHuedb, huedatabase); /* fetching data from hue db and storing it in to a model */
+ totalQueries += dbpojoHiveSavedQuery.size();
- dbpojoHiveSavedQuery = hivesavedqueryimpl.fetchFromHuedb(username, startDate, endDate, connectionHuedb, huedatabase); /* fetching data from hue db and storing it in to a model */
+ logger.info("Migration started for user " + username);
+ logger.info("Queries fetched from hue..");
+ for (i = 0; i < dbpojoHiveSavedQuery.size(); i++) {
+ logger.info("the query fetched from hue" + dbpojoHiveSavedQuery.get(i).getQuery());
- for (int j = 0; j < dbpojoHiveSavedQuery.size(); j++) {
- logger.info("the query fetched from hue" + dbpojoHiveSavedQuery.get(j).getQuery());
+ }
- }
+ if (dbpojoHiveSavedQuery.size() == 0) /* if no data has been fetched from hue db according to search criteria */ {
- if (dbpojoHiveSavedQuery.size() == 0) /* if no data has been fetched from hue db according to search criteria */ {
+ logger.info("No queries has been selected for the user " + username + " between dates: " + startDate +" - "+endDate);
+ } else {
- migrationresult.setIsNoQuerySelected("yes");
- migrationresult.setProgressPercentage(0);
- migrationresult.setNumberOfQueryTransfered(0);
- migrationresult.setTotalNoQuery(dbpojoHiveSavedQuery.size());
- getResourceManager(view).update(migrationresult, jobid);
- logger.info("No queries has been selected acccording to your criteria");
+ connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();/* connecting to ambari DB */
+ connectionAmbaridb.setAutoCommit(false);
- logger.info("no hive saved query has been selected from hue according to your criteria of searching");
+ int tableIdSavedQuery = hivesavedqueryimpl.fetchInstancetablename(connectionAmbaridb, instance, ambaridatabase, SAVEDQUERYSEQUENCE); /* fetching the instance table name for migration saved query from the given instance name */
+ int tableIdFileResource = hivesavedqueryimpl.fetchInstancetablename(connectionAmbaridb, instance, ambaridatabase, FILERESOURCESEQUENCE);
+ int tableIdUdf = hivesavedqueryimpl.fetchInstancetablename(connectionAmbaridb, instance, ambaridatabase, UDFSEQUENCE);
+ sequenceName = SAVEDQUERYTABLE + "_" + tableIdSavedQuery + "_" + SEQ;
+ int savedQuerySequence = hivesavedqueryimpl.fetchSequenceno(connectionAmbaridb, ambaridatabase, sequenceName);
+ sequenceName = FILETABLE + "_" + tableIdFileResource + "_" + SEQ;
+ int fileResourceSequence = hivesavedqueryimpl.fetchSequenceno(connectionAmbaridb, ambaridatabase, sequenceName);
+ sequenceName = UDFTABLE + "_" + tableIdUdf + "_" + SEQ;
+ int udfSequence = hivesavedqueryimpl.fetchSequenceno(connectionAmbaridb, ambaridatabase, sequenceName);
+ for (i = 0; i < dbpojoHiveSavedQuery.size(); i++) {
- } else {
+ logger.info("_____________________");
+ logger.info("Loop No." + (i + 1));
+ logger.info("_____________________");
- connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();/* connecting to ambari DB */
- connectionAmbaridb.setAutoCommit(false);
+ float calc = ((float) (i + 1)) / dbpojoHiveSavedQuery.size() * 100;
+ int progressPercentage = Math.round(calc);
- int tableIdSavedQuery = hivesavedqueryimpl.fetchInstancetablenameForSavedqueryHive(connectionAmbaridb, instance, ambaridatabase); /* fetching the instance table name for migration saved query from the given instance name */
- int sequence = hivesavedqueryimpl.fetchSequenceno(connectionAmbaridb, tableIdSavedQuery, ambaridatabase);
+ migrationresult.setProgressPercentage(progressPercentage);
+ migrationresult.setNumberOfQueryTransfered(i + 1);
+ getResourceManager(view).update(migrationresult, jobid);
- for (i = 0; i < dbpojoHiveSavedQuery.size(); i++) {
+ logger.info("query fetched from hue:- " + dbpojoHiveSavedQuery.get(i).getQuery());
- logger.info("_____________________");
- logger.info("Loop No." + (i + 1));
- logger.info("_____________________");
+ logger.info("Table name are fetched from instance name.");
- float calc = ((float) (i + 1)) / dbpojoHiveSavedQuery.size() * 100;
- int progressPercentage = Math.round(calc);
+ hivesavedqueryimpl.writetoFilequeryHql(dbpojoHiveSavedQuery.get(i).getQuery(), ConfigurationCheckImplementation.getHomeDir()); /* writing migration query to a local file*/
- migrationresult.setIsNoQuerySelected("no");
- migrationresult.setProgressPercentage(progressPercentage);
- migrationresult.setNumberOfQueryTransfered(i + 1);
- migrationresult.setTotalNoQuery(dbpojoHiveSavedQuery.size());
- getResourceManager(view).update(migrationresult, jobid);
+ hivesavedqueryimpl.writetoFileLogs(ConfigurationCheckImplementation.getHomeDir());/* writing logs to localfile */
- logger.info("query fetched from hue:- " + dbpojoHiveSavedQuery.get(i).getQuery());
+ logger.info(".hql and logs file are saved in temporary directory");
- logger.info("Table name are fetched from instance name.");
+ maxCountforSavequeryAmbaridb = i + savedQuerySequence + 1;
- hivesavedqueryimpl.writetoFilequeryHql(dbpojoHiveSavedQuery.get(i).getQuery(), ConfigurationCheckImplementation.getHomeDir()); /* writing migration query to a local file*/
+ time = hivesavedqueryimpl.getTime();/* getting system time */
- hivesavedqueryimpl.writetoFileLogs(ConfigurationCheckImplementation.getHomeDir());/* writing logs to localfile */
+ if(usernames[l].equals("all")) {
+ username = dbpojoHiveSavedQuery.get(i).getOwnerName();
+ }
- logger.info(".hql and logs file are saved in temporary directory");
+ dirNameforHiveSavedquery = "/user/" + username + "/hive/scripts/hive-query-" + maxCountforSavequeryAmbaridb + "-"
+ + time + "/"; // creating hdfs directory name
- maxCountforSavequeryAmbaridb = i + sequence + 1;
+ logger.info("Directory will be creted in HDFS" + dirNameforHiveSavedquery);
- time = hivesavedqueryimpl.getTime();/* getting system time */
+ logger.info("Row inserted in hive History table.");
- dirNameforHiveSavedquery = "/user/"+username+"/hive/scripts/hive-query-" + maxCountforSavequeryAmbaridb + "-"
- + time + "/"; // creating hdfs directory name
+ if (view.getProperties().get("KerberoseEnabled").equals("y")) {
- logger.info("Directory will be creted in HDFS" + dirNameforHiveSavedquery);
+ logger.info("Kerberose Enabled");
+ hivesavedqueryimpl.createDirHiveSecured(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"), username, view.getProperties().get("PrincipalUserName"));// creating directory in hdfs in kerborized cluster
+ hivesavedqueryimpl.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"), username, view.getProperties().get("PrincipalUserName"));// putting .hql file in hdfs in kerberoroized cluster
+ hivesavedqueryimpl.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"), username, view.getProperties().get("PrincipalUserName"));// putting logs file in hdfs in kerberoroized cluster
- logger.info("Row inserted in hive History table.");
+ } else {
+ logger.info("Kerberose Not Enabled");
+ hivesavedqueryimpl.createDirHive(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"), username);// creating directory in hdfs
+ hivesavedqueryimpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"), username);// putting .hql file in hdfs directory
+ hivesavedqueryimpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"), username);// putting logs file in hdfs
+ }
- if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+ // inserting into the hive saved query table
+ //6.
+ hivesavedqueryimpl.insertRowinSavedQuery(maxCountforSavequeryAmbaridb, dbpojoHiveSavedQuery.get(i).getDatabase(), dirNameforHiveSavedquery, dbpojoHiveSavedQuery.get(i).getQuery(), dbpojoHiveSavedQuery.get(i).getQueryTitle(), connectionAmbaridb, tableIdSavedQuery, instance, i, ambaridatabase, username);
+ // check if UDFs need to be migrated
+ if (dbpojoHiveSavedQuery.get(i).getFilePaths() != null) {
+ for (int k = 0; k < dbpojoHiveSavedQuery.get(i).getFilePaths().size(); k++) {
+ String filePath = dbpojoHiveSavedQuery.get(i).getFilePaths().get(k);
+ String fileName = filePath.substring(filePath.lastIndexOf('/') + 1, filePath.length());
+ // check if a udf is already present (udf name and owner name must match)
+ if (!hivesavedqueryimpl.checkUdfExists(connectionAmbaridb, fileName, username, tableIdFileResource, ambaridatabase, udfSet)) {
+ udfSet.add(fileName + username);
+ maxCountforFileResourceAmbaridb = j + fileResourceSequence + 1;
+ maxCountforUdfAmbaridb = j + udfSequence + 1;
+ String absoluteFilePath = view.getProperties().get("namenode_URI_Ambari") + filePath;
+ hivesavedqueryimpl.insertUdf(connectionAmbaridb, tableIdFileResource, tableIdUdf, maxCountforFileResourceAmbaridb, maxCountforUdfAmbaridb, dbpojoHiveSavedQuery.get(i).getUdfClasses().get(k), fileName, dbpojoHiveSavedQuery.get(i).getUdfNames().get(k), username, absoluteFilePath, ambaridatabase);
+ j = j + 1;
+ }
+ }
+ }
- logger.info("Kerberose Enabled");
- hivesavedqueryimpl.createDirHiveSecured(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"),username,view.getProperties().get("PrincipalUserName"));// creating directory in hdfs in kerborized cluster
- hivesavedqueryimpl.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"),username,view.getProperties().get("PrincipalUserName"));// putting .hql file in hdfs in kerberoroized cluster
- hivesavedqueryimpl.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"),username,view.getProperties().get("PrincipalUserName"));// putting logs file in hdfs in kerberoroized cluster
- } else {
- logger.info("Kerberose Not Enabled");
- hivesavedqueryimpl.createDirHive(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"),username);// creating directory in hdfs
- hivesavedqueryimpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"),username);// putting .hql file in hdfs directory
- hivesavedqueryimpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"),username);// putting logs file in hdfs
}
-
- //inserting into hived saved query table
- //6.
- hivesavedqueryimpl.insertRowinSavedQuery(maxCountforSavequeryAmbaridb, dbpojoHiveSavedQuery.get(i).getDatabase(), dirNameforHiveSavedquery, dbpojoHiveSavedQuery.get(i).getQuery(), dbpojoHiveSavedQuery.get(i).getOwner(), connectionAmbaridb, tableIdSavedQuery, instance, i, ambaridatabase, username);
+ sequenceName = SAVEDQUERYTABLE + "_" + tableIdSavedQuery + "_" + SEQ;
+ hivesavedqueryimpl.updateSequenceno(connectionAmbaridb, maxCountforSavequeryAmbaridb, sequenceName, ambaridatabase);
+ sequenceName = FILETABLE + "_" + tableIdFileResource + "_" + SEQ;
+ hivesavedqueryimpl.updateSequenceno(connectionAmbaridb, maxCountforFileResourceAmbaridb, sequenceName, ambaridatabase);
+ sequenceName = UDFTABLE + "_" + tableIdUdf + "_" + SEQ;
+ hivesavedqueryimpl.updateSequenceno(connectionAmbaridb, maxCountforUdfAmbaridb, sequenceName, ambaridatabase);
+ connectionAmbaridb.commit();
}
- hivesavedqueryimpl.updateSequenceno(connectionAmbaridb, maxCountforSavequeryAmbaridb, tableIdSavedQuery, ambaridatabase);
- connectionAmbaridb.commit();
-
+ logger.info("Migration completed for user " + username);
}
-
-
+ logger.info("Migration Completed");
+ migrationresult.setFlag(1);
+ if(totalQueries==0) {
+ migrationresult.setNumberOfQueryTransfered(0);
+ migrationresult.setTotalNoQuery(0);
+ } else {
+ migrationresult.setNumberOfQueryTransfered(totalQueries);
+ migrationresult.setTotalNoQuery(totalQueries);
+ migrationresult.setProgressPercentage(100);
+ }
+ getResourceManager(view).update(migrationresult, jobid);
} catch (SQLException e) {
logger.error("SQL exception: ", e);
http://git-wip-us.apache.org/repos/asf/ambari/blob/bea5ebb8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java
index de81f0b..ab2b903 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java
@@ -53,6 +53,7 @@ public class HiveSavedQueryStartJob extends Thread{
migrationresult.setIntanceName(instance);
migrationresult.setUserNameofhue(username);
migrationresult.setProgressPercentage(0);
+ migrationresult.setFlag(0);
JSONObject response = new JSONObject();
http://git-wip-us.apache.org/repos/asf/ambari/blob/bea5ebb8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
index 8f843d0..ec39d55 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
@@ -31,9 +31,10 @@ import java.util.Date;
import java.util.GregorianCalendar;
import java.io.*;
import java.net.URISyntaxException;
+import java.net.URI;
;
import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset.QuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset.QuerySet;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset.QuerySetHueDb;
import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -259,7 +260,7 @@ public class PigJobMigrationImplementation {
return strDate;
}
- public ArrayList<PigModel> fetchFromHueDB(String username, String startdate, String endtime, Connection connection, QuerySet huedatabase) throws ClassNotFoundException, IOException {
+ public ArrayList<PigModel> fetchFromHueDB(String username, String startdate, String endtime, Connection connection, QuerySetHueDb huedatabase) throws ClassNotFoundException, IOException {
int id = 0;
int i = 0;
String[] query = new String[100];
@@ -269,6 +270,8 @@ public class PigJobMigrationImplementation {
PreparedStatement prSt = null;
Statement statement = connection.createStatement();
ResultSet rs;
+ String ownerName = "";
+ int ownerId;
ResultSet rs1 = null;
if (username.equals("all")) {
@@ -321,6 +324,14 @@ public class PigJobMigrationImplementation {
while (rs1.next()) {
PigModel pigjjobobject = new PigModel();
+ ownerId = rs1.getInt("user_id");
+ if(username.equals("all")) {
+ prSt = huedatabase.getUserName(connection, ownerId);
+ ResultSet resultSet = prSt.executeQuery();
+ while(resultSet.next()) {
+ ownerName = resultSet.getString("username");
+ }
+ }
int runstatus = rs1.getInt("status");
@@ -336,6 +347,7 @@ public class PigJobMigrationImplementation {
String title = rs1.getString("script_title");
+ pigjjobobject.setUserName(ownerName);
pigjjobobject.setTitle(title);
String dir = rs1.getString("statusdir");
pigjjobobject.setDir(dir);
@@ -401,10 +413,10 @@ public class PigJobMigrationImplementation {
conf.set("fs.defaultFS", namenodeuri);
conf.set("hadoop.job.ugi", "hdfs");
- FileSystem fs = FileSystem.get(conf);
+ URI uri = new URI(dir);
+ FileSystem fs = FileSystem.get(uri, conf, username);
Path src = new Path(dir);
fs.mkdirs(src);
- fs.setOwner(src,username,"hadoop");
return null;
}
});
@@ -434,10 +446,10 @@ public class PigJobMigrationImplementation {
ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
public Boolean run() throws Exception {
- FileSystem fs = FileSystem.get(conf);
+ URI uri = new URI(dir);
+ FileSystem fs = FileSystem.get(uri, conf, username);
Path src = new Path(dir);
Boolean b = fs.mkdirs(src);
- fs.setOwner(src,username,"hadoop");
return b;
}
});
@@ -478,11 +490,15 @@ public class PigJobMigrationImplementation {
}
Path path1 = new Path(source);
+ if(!fileSystemHue.exists(path1)) {
+ FSDataOutputStream out = fileSystemHue.create(path1);
+ out.close();
+ }
FSDataInputStream in1 = fileSystemHue.open(path1);
Path path = new Path(dest1);
if (fileSystemAmbari.exists(path)) {
-
+ fileSystemAmbari.delete(path, true);
}
FSDataOutputStream out = fileSystemAmbari.create(path);
@@ -494,7 +510,8 @@ public class PigJobMigrationImplementation {
}
in1.close();
out.close();
- fileSystemAmbari.setOwner(path,username,"hadoop");
+ fileSystemAmbari.setOwner(path, username, "hadoop");
+ fileSystemHue.close();
fileSystemAmbari.close();
return null;
}
@@ -545,11 +562,15 @@ public class PigJobMigrationImplementation {
}
Path path1 = new Path(source);
+ if(!fileSystemHue.exists(path1)) {
+ FSDataOutputStream out = fileSystemHue.create(path1);
+ out.close();
+ }
FSDataInputStream in1 = fileSystemHue.open(path1);
Path path = new Path(dest1);
if (fileSystemAmbari.exists(path)) {
-
+ fileSystemAmbari.delete(path, true);
}
FSDataOutputStream out = fileSystemAmbari.create(path);
byte[] b = new byte[1024];
@@ -559,7 +580,8 @@ public class PigJobMigrationImplementation {
}
in1.close();
out.close();
- fileSystemAmbari.setOwner(path,username,"hadoop");
+ fileSystemAmbari.setOwner(path, username, "hadoop");
+ fileSystemHue.close();
fileSystemAmbari.close();
return null;
}
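The copy routines above gain two guards: a missing Hue-side status file is replaced by an empty one rather than failing on open, and an existing destination is deleted before it is rewritten instead of being silently left in place. A condensed sketch of that pattern (the filesystem handles and paths are placeholders):

    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Sketch only: the guarded Hue-to-Ambari HDFS copy used above.
    public class GuardedHdfsCopySketch {
      static void copy(FileSystem hueFs, FileSystem ambariFs,
                       String source, String dest) throws Exception {
        Path src = new Path(source);
        if (!hueFs.exists(src)) {       // missing status file: create it empty
          hueFs.create(src).close();
        }
        Path dst = new Path(dest);
        if (ambariFs.exists(dst)) {     // stale copy from an earlier run
          ambariFs.delete(dst, true);
        }
        FSDataInputStream in = hueFs.open(src);
        FSDataOutputStream out = ambariFs.create(dst);
        byte[] buf = new byte[1024];
        int numBytes;
        while ((numBytes = in.read(buf)) > 0) {
          out.write(buf, 0, numBytes);
        }
        in.close();
        out.close();
      }
    }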
http://git-wip-us.apache.org/repos/asf/ambari/blob/bea5ebb8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
index c9cfc9d..7bd36ea 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
@@ -1,216 +1,230 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.text.ParseException;
-import java.util.ArrayList;
-
-import org.apache.ambari.view.ViewContext;
-
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
-import org.apache.log4j.Logger;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset.*;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset.*;
-
-public class PigJobMigrationUtility {
-
- protected MigrationResourceManager resourceManager = null;
-
- public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
- if (resourceManager == null) {
- resourceManager = new MigrationResourceManager(view);
+ /**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+ package org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob;
+
+ import java.beans.PropertyVetoException;
+ import java.io.IOException;
+ import java.net.URISyntaxException;
+ import java.sql.Connection;
+ import java.sql.SQLException;
+ import java.text.ParseException;
+ import java.util.ArrayList;
+
+ import org.apache.ambari.view.ViewContext;
+
+ import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+ import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
+ import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
+ import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+ import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
+ import org.apache.log4j.Logger;
+
+ import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+ import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+ import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset.*;
+ import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset.*;
+
+ public class PigJobMigrationUtility {
+
+ protected MigrationResourceManager resourceManager = null;
+
+ public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
+ if (resourceManager == null) {
+ resourceManager = new MigrationResourceManager(view);
+ }
+ return resourceManager;
}
- return resourceManager;
- }
- public void pigJobMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
+ public void pigJobMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
- long startTime = System.currentTimeMillis();
+ long startTime = System.currentTimeMillis();
- final Logger logger = Logger.getLogger(PigJobMigrationUtility.class);
- Connection connectionHuedb = null;
- Connection connectionAmbaridb = null;
+ final Logger logger = Logger.getLogger(PigJobMigrationUtility.class);
+ Connection connectionHuedb = null;
+ Connection connectionAmbaridb = null;
- logger.info("------------------------------");
- logger.info("pig Jobs Migration started");
- logger.info("------------------------------");
- logger.info("start date: " + startDate);
- logger.info("enddate date: " + endDate);
- logger.info("instance is: " + username);
- logger.info("hue username is : " + instance);
+ logger.info("------------------------------");
+ logger.info("pig Jobs Migration started");
+ logger.info("------------------------------");
+ logger.info("start date: " + startDate);
+ logger.info("enddate date: " + endDate);
+ logger.info("instance is: " + username);
+ logger.info("hue username is : " + instance);
- PigJobMigrationImplementation pigjobimpl = new PigJobMigrationImplementation();// creating the implementation object
+ PigJobMigrationImplementation pigjobimpl = new PigJobMigrationImplementation();// creating the implementation object
- QuerySet huedatabase = null;
+ QuerySetHueDb huedatabase = null;
- if (view.getProperties().get("huedrivername").contains("mysql")) {
- huedatabase = new MysqlQuerySet();
- } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
- huedatabase = new PostgressQuerySet();
- } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
- huedatabase = new SqliteQuerySet();
- } else if (view.getProperties().get("huedrivername").contains("oracle")) {
- huedatabase = new OracleQuerySet();
- }
+ if (view.getProperties().get("huedrivername").contains("mysql")) {
+ huedatabase = new MysqlQuerySetHueDb();
+ } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
+ huedatabase = new PostgressQuerySetHueDb();
+ } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
+ huedatabase = new SqliteQuerySetHueDb();
+ } else if (view.getProperties().get("huedrivername").contains("oracle")) {
+ huedatabase = new OracleQuerySetHueDb();
+ }
- QuerySetAmbariDB ambaridatabase = null;
+ QuerySetAmbariDB ambaridatabase = null;
- if (view.getProperties().get("ambaridrivername").contains("mysql")) {
- ambaridatabase = new MysqlQuerySetAmbariDB();
- } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
- ambaridatabase = new PostgressQuerySetAmbariDB();
- } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
- ambaridatabase = new OracleQuerySetAmbariDB();
- }
- int maxCountforPigScript = 0, i = 0;
+ if (view.getProperties().get("ambaridrivername").contains("mysql")) {
+ ambaridatabase = new MysqlQuerySetAmbariDB();
+ } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
+ ambaridatabase = new PostgressQuerySetAmbariDB();
+ } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+ ambaridatabase = new OracleQuerySetAmbariDB();
+ }
+ int maxCountforPigScript = 0, i = 0;
- String time = null, timeIndorder = null;
- Long epochtime = null;
- String pigJobDirName;
- ArrayList<PigModel> pigJobDbPojo = new ArrayList<PigModel>();
+ String time = null, timeIndorder = null;
+ Long epochtime = null;
+ String pigJobDirName;
+ ArrayList<PigModel> pigJobDbPojo = new ArrayList<PigModel>();
- try {
+ try {
- connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connecting to hue database
+ String[] usernames = username.split(",");
+ int totalQueries = 0;
+ for (int k = 0; k < usernames.length; k++) {
- pigJobDbPojo = pigjobimpl.fetchFromHueDB(username, startDate, endDate, connectionHuedb, huedatabase);// fetching the PigJobs details from hue
+ connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connecting to hue database
+ username = usernames[k];
+ migrationresult.setProgressPercentage(0);
+ logger.info("Migration started for user " + username);
+ pigJobDbPojo = pigjobimpl.fetchFromHueDB(username, startDate, endDate, connectionHuedb, huedatabase);// fetching the PigJobs details from hue
+ totalQueries += pigJobDbPojo.size();
+ for (int j = 0; j < pigJobDbPojo.size(); j++) {
+ logger.info("the query fetched from hue=" + pigJobDbPojo.get(i).getScript());
- for (int j = 0; j < pigJobDbPojo.size(); j++) {
- logger.info("the query fetched from hue=" + pigJobDbPojo.get(i).getScript());
+ }
- }
+ /* No Pig job details have been fetched according to the search criteria */
+ if (pigJobDbPojo.size() == 0) {
- /*No pig Job details has been fetched accordring to search criteria*/
- if (pigJobDbPojo.size() == 0) {
+ logger.info("No queries has been selected for the user " + username + " between dates: " + startDate +" - "+endDate);
+ } else {
- migrationresult.setIsNoQuerySelected("yes");
- migrationresult.setProgressPercentage(0);
- migrationresult.setNumberOfQueryTransfered(0);
- migrationresult.setTotalNoQuery(pigJobDbPojo.size());
- getResourceManager(view).update(migrationresult, jobid);
- logger.info("no pig Job has been selected from hue according to your criteria of searching");
+ connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
+ connectionAmbaridb.setAutoCommit(false);
- } else {
+ int fetchPigTablenameInstance = pigjobimpl.fetchInstanceTablename(connectionAmbaridb, instance, ambaridatabase);
+ int sequence = pigjobimpl.fetchSequenceno(connectionAmbaridb, fetchPigTablenameInstance, ambaridatabase);
- connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
- connectionAmbaridb.setAutoCommit(false);
+ for (i = 0; i < pigJobDbPojo.size(); i++) {
- int fetchPigTablenameInstance = pigjobimpl.fetchInstanceTablename(connectionAmbaridb, instance, ambaridatabase);
- int sequence = pigjobimpl.fetchSequenceno(connectionAmbaridb, fetchPigTablenameInstance, ambaridatabase);
+ float calc = ((float) (i + 1)) / pigJobDbPojo.size() * 100;
+ int progressPercentage = Math.round(calc);
- for (i = 0; i < pigJobDbPojo.size(); i++) {
+ migrationresult.setProgressPercentage(progressPercentage);
+ migrationresult.setNumberOfQueryTransfered(i + 1);
+ getResourceManager(view).update(migrationresult, jobid);
- float calc = ((float) (i + 1)) / pigJobDbPojo.size() * 100;
- int progressPercentage = Math.round(calc);
- migrationresult.setIsNoQuerySelected("no");
- migrationresult.setProgressPercentage(progressPercentage);
- migrationresult.setNumberOfQueryTransfered(i + 1);
- migrationresult.setTotalNoQuery(pigJobDbPojo.size());
- getResourceManager(view).update(migrationresult, jobid);
+ logger.info("Loop No." + (i + 1));
+ logger.info("________________");
+ logger.info("the title of script " + pigJobDbPojo.get(i).getTitle());
- logger.info("Loop No." + (i + 1));
- logger.info("________________");
- logger.info("the title of script " + pigJobDbPojo.get(i).getTitle());
+ maxCountforPigScript = i + sequence + 1;
- maxCountforPigScript = i + sequence + 1;
+ time = pigjobimpl.getTime();
+ timeIndorder = pigjobimpl.getTimeInorder();
+ epochtime = pigjobimpl.getEpochTime();
- time = pigjobimpl.getTime();
- timeIndorder = pigjobimpl.getTimeInorder();
- epochtime = pigjobimpl.getEpochTime();
+ if (usernames[k].equals("all")) {
+ username = pigJobDbPojo.get(i).getUserName();
+ }
- pigJobDirName = "/user/"+username+"/pig/jobs/" + pigJobDbPojo.get(i).getTitle() + "_" + time + "/";
+ pigJobDirName = "/user/" + username + "/pig/jobs/" + pigJobDbPojo.get(i).getTitle() + "_" + time + "/";
- pigjobimpl.insertRowPigJob(pigJobDirName, maxCountforPigScript, time, timeIndorder, epochtime, pigJobDbPojo.get(i).getTitle(), connectionAmbaridb, fetchPigTablenameInstance, pigJobDbPojo.get(i).getStatus(), instance, i, ambaridatabase, username);
+ pigjobimpl.insertRowPigJob(pigJobDirName, maxCountforPigScript, time, timeIndorder, epochtime, pigJobDbPojo.get(i).getTitle(), connectionAmbaridb, fetchPigTablenameInstance, pigJobDbPojo.get(i).getStatus(), instance, i, ambaridatabase, username);
- if (view.getProperties().get("KerberoseEnabled").equals("y")) {
- pigjobimpl.createDirPigJobSecured(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"),username,view.getProperties().get("PrincipalUserName"));
- pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"),username,view.getProperties().get("PrincipalUserName"));
- pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"),username,view.getProperties().get("PrincipalUserName"));
- pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"),username,view.getProperties().get("PrincipalUserName"));
+ if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+ pigjobimpl.createDirPigJobSecured(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), username, view.getProperties().get("PrincipalUserName"));
+ pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username, view.getProperties().get("PrincipalUserName"));
+ pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username, view.getProperties().get("PrincipalUserName"));
+ pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username, view.getProperties().get("PrincipalUserName"));
- } else {
+ } else {
- pigjobimpl.createDirPigJob(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"),username);
- pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"),username);
- pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"),username);
- pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"),username);
+ pigjobimpl.createDirPigJob(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), username);
+ pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username);
+ pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username);
+ pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username);
- }
+ }
- logger.info(pigJobDbPojo.get(i).getTitle() + "has been migrated to Ambari");
+ logger.info(pigJobDbPojo.get(i).getTitle() + " has been migrated to Ambari");
+ }
+ pigjobimpl.updateSequenceno(connectionAmbaridb, maxCountforPigScript, fetchPigTablenameInstance, ambaridatabase);
+ connectionAmbaridb.commit();
+ }
+ logger.info("Migration completed for user " + username);
}
- pigjobimpl.updateSequenceno(connectionAmbaridb, maxCountforPigScript, fetchPigTablenameInstance, ambaridatabase);
- connectionAmbaridb.commit();
- }
-
- } catch (SQLException e) {
- logger.error("sql exception in ambari database:", e);
- try {
- connectionAmbaridb.rollback();
- logger.info("roll back done");
- } catch (SQLException e1) {
- logger.error("roll back exception:", e1);
- }
- } catch (ClassNotFoundException e2) {
- logger.error("class not found exception:", e2);
- } catch (ParseException e) {
- logger.error("ParseException: ", e);
- } catch (URISyntaxException e) {
- logger.error("URISyntaxException", e);
- } catch (PropertyVetoException e) {
- logger.error("PropertyVetoException", e);
- } finally {
- if (null != connectionAmbaridb)
+ migrationresult.setFlag(1);
+ if (totalQueries == 0) {
+ migrationresult.setNumberOfQueryTransfered(0);
+ migrationresult.setTotalNoQuery(0);
+ } else {
+ migrationresult.setNumberOfQueryTransfered(totalQueries);
+ migrationresult.setTotalNoQuery(totalQueries);
+ migrationresult.setProgressPercentage(100);
+ }
+ getResourceManager(view).update(migrationresult, jobid);
+ } catch (SQLException e) {
+ logger.error("sql exception in ambari database:", e);
try {
- connectionAmbaridb.close();
- } catch (SQLException e) {
- logger.error("connection closing exception ", e);
+ connectionAmbaridb.rollback();
+ logger.info("roll back done");
+ } catch (SQLException e1) {
+ logger.error("roll back exception:", e1);
}
- }
+ } catch (ClassNotFoundException e2) {
+ logger.error("class not found exception:", e2);
+ } catch (ParseException e) {
+ logger.error("ParseException: ", e);
+ } catch (URISyntaxException e) {
+ logger.error("URISyntaxException", e);
+ } catch (PropertyVetoException e) {
+ logger.error("PropertyVetoException", e);
+ } finally {
+ if (null != connectionAmbaridb)
+ try {
+ connectionAmbaridb.close();
+ } catch (SQLException e) {
+ logger.error("connection closing exception ", e);
+ }
+ }
- logger.info("------------------------------");
- logger.info("pig Job Migration End");
- logger.info("------------------------------");
+ logger.info("------------------------------");
+ logger.info("pig Job Migration End");
+ logger.info("------------------------------");
- long stopTime = System.currentTimeMillis();
- long elapsedTime = stopTime - startTime;
+ long stopTime = System.currentTimeMillis();
+ long elapsedTime = stopTime - startTime;
- migrationresult.setJobtype("hivehistoryquerymigration");
- migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
- getResourceManager(view).update(migrationresult, jobid);
+ migrationresult.setJobtype("pigjobmigration");
+ migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
+ getResourceManager(view).update(migrationresult, jobid);
- }
+ }
-}
+ }
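The rewritten utility above carries the two behavioural changes of this patch: the username parameter may now be a comma-separated list, and the literal "all" migrates every fetched job under its original Hue owner. A stripped-down sketch of that control flow; fetchJobs() stands in for PigJobMigrationImplementation.fetchFromHueDB(), while PigModel.getUserName() and getTitle() match the model class used in the diff:

  // One fetch per selected user; totals are accumulated across users
  // and reported once at the end, as in the utility above.
  static int migrateForUsers(String requestedUsers, String time) throws Exception {
    int totalQueries = 0;
    for (String requested : requestedUsers.split(",")) {
      java.util.List<PigModel> jobs = fetchJobs(requested);
      totalQueries += jobs.size();
      for (PigModel job : jobs) {
        // The "all" sentinel defers to the owner stored on each row.
        String owner = requested.equals("all") ? job.getUserName() : requested;
        String dir = "/user/" + owner + "/pig/jobs/" + job.getTitle() + "_" + time + "/";
        // ... insert the Ambari row and copy script.pig/stderr/stdout into dir ...
      }
    }
    return totalQueries;
  }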
http://git-wip-us.apache.org/repos/asf/ambari/blob/bea5ebb8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobStartJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobStartJob.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobStartJob.java
index 15f033f..872dbb8 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobStartJob.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobStartJob.java
@@ -52,6 +52,7 @@ public class PigJobStartJob extends Thread{
migrationresult.setIntanceName(instance);
migrationresult.setUserNameofhue(username);
migrationresult.setProgressPercentage(0);
+ migrationresult.setFlag(0);
PigJobMigrationUtility pigjobmigration=new PigJobMigrationUtility();
try {
http://git-wip-us.apache.org/repos/asf/ambari/blob/bea5ebb8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java
index eaf5d38..9428908 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java
@@ -52,6 +52,7 @@ public class PigSavedScriptStartJob extends Thread{
migrationresult.setIntanceName(instance);
migrationresult.setUserNameofhue(username);
migrationresult.setProgressPercentage(0);
+ migrationresult.setFlag(0);
PigScriptMigrationUtility pigsavedscript =new PigScriptMigrationUtility();
try {
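These last two hunks seed the new progress flag to 0 before the migration thread starts; the utilities set it to 1 only after every selected user has been processed. A poller can therefore distinguish a run that is merely at 100% for the current user from one that is fully finished. A minimal sketch, assuming a getFlag() accessor exists on MigrationResponse to mirror the setFlag() calls added here:

  // Sketch only: consuming the flag from a polling client.
  while (migrationresult.getFlag() == 0) {
    Thread.sleep(1000);  // migration thread still running
  }
  // flag == 1: totals and progressPercentage are final.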