Posted to commits@ambari.apache.org by db...@apache.org on 2016/06/29 11:35:38 UTC
[07/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java
new file mode 100644
index 0000000..bcbe4de
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.InstanceModel;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.sql.*;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.instancedetail.*;
+
+
+public class HiveInstanceDetailsUtility {
+
+ public List<InstanceModel> getInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException {
+
+ List<InstanceModel> instancelist = new ArrayList<>();
+ Connection conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
+ conn.setAutoCommit(false);
+ PreparedStatement prSt;
+
+ QuerySetAmbariDB ambaridatabase = null;
+
+ if (view.getProperties().get("ambaridrivername").contains("mysql")) {
+ ambaridatabase = new MysqlQuerySetAmbariDB();
+ } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
+ ambaridatabase = new PostgressQuerySetAmbariDB();
+ } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+ ambaridatabase = new OracleQuerySetAmbariDB();
+ }
+
+ ResultSet rs1 = null;
+ prSt = ambaridatabase.getHiveInstanceDeatil(conn);
+ rs1 = prSt.executeQuery();
+ int i = 0;
+
+ while (rs1.next()) {
+ InstanceModel instance = new InstanceModel();
+ instance.setInstanceName(rs1.getString(1));
+ instance.setId(i);
+ instancelist.add(instance);
+ i++;
+ }
+ rs1.close();
+ prSt.close();
+ conn.close();
+ return instancelist;
+
+ }
+
+ public List<InstanceModel> getAllInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException {
+
+ List<InstanceModel> instancelist = new ArrayList<>();
+ Connection conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
+ conn.setAutoCommit(false);
+ PreparedStatement prSt;
+
+ QuerySetAmbariDB ambaridatabase = null;
+
+ if (view.getProperties().get("ambaridrivername").contains("mysql")) {
+ ambaridatabase = new MysqlQuerySetAmbariDB();
+ } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
+ ambaridatabase = new PostgressQuerySetAmbariDB();
+ } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+ ambaridatabase = new OracleQuerySetAmbariDB();
+ }
+
+ ResultSet rs1 = null;
+ int i = 0;
+ prSt = ambaridatabase.getAllInstanceDeatil(conn);
+ rs1 = prSt.executeQuery();
+
+ while (rs1.next()) {
+ InstanceModel instance = new InstanceModel();
+ instance.setInstanceName(rs1.getString(1));
+ instance.setId(i);
+ instancelist.add(instance);
+ i++;
+ }
+ rs1.close();
+ prSt.close();
+ conn.close();
+ return instancelist;
+
+ }
+
+
+}
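Each utility in this patch repeats the same driver-name dispatch and leaves the query set null when no driver matches, which later surfaces as a NullPointerException. A minimal sketch of a shared factory that would fail fast instead; the class is hypothetical and not part of this patch, and it assumes it lives next to the ambari query-set classes imported above:

public final class AmbariQuerySetFactory {

  private AmbariQuerySetFactory() {
  }

  // Selects the query set matching the configured JDBC driver name;
  // throws on an unsupported driver instead of returning null.
  public static QuerySetAmbariDB fromDriverName(String driverName) {
    if (driverName.contains("mysql")) {
      return new MysqlQuerySetAmbariDB();
    } else if (driverName.contains("postgresql")) {
      return new PostgressQuerySetAmbariDB();
    } else if (driverName.contains("oracle")) {
      return new OracleQuerySetAmbariDB();
    }
    throw new IllegalArgumentException("Unsupported Ambari JDBC driver: " + driverName);
  }
}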
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueDatabaseCheck.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueDatabaseCheck.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueDatabaseCheck.java
new file mode 100644
index 0000000..a5ec758
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueDatabaseCheck.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+
+/**
+ * Service class to check the Hue database connection
+ */
+@Path("/huedatabases")
+
+public class HueDatabaseCheck {
+
+ @Inject
+ ViewContext view;
+
+ @GET
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response hueDatabase() throws IOException {
+ JSONObject response = new JSONObject();
+ try {
+ response.put("huedatabase", ConfigurationCheckImplementation.checkHueDatabaseConnection(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")));
+ return Response.ok(response).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex);
+ }
+
+ }
+
+}
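Once the view is deployed, the endpoint above can be smoke-tested with a plain JAX-RS 2.x client. A minimal sketch, assuming the usual Ambari view resource URL layout; the host, view name, version, instance name, and the omitted authentication are placeholders, not values from this patch:

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;

public class HueDatabaseCheckSmokeTest {
  public static void main(String[] args) {
    // Placeholder URL; real deployments substitute their Ambari host and
    // view instance coordinates.
    String url = "http://ambari-host:8080/api/v1/views/HUETOAMBARI_MIGRATION"
        + "/versions/1.0.0/instances/migration/resources/huedatabases";
    Client client = ClientBuilder.newClient();   // authentication omitted
    try {
      // GET returns the {"huedatabase": ...} JSON payload built above
      String json = client.target(url).request("application/json").get(String.class);
      System.out.println(json);
    } finally {
      client.close();
    }
  }
}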
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueHttpUrlCheck.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueHttpUrlCheck.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueHttpUrlCheck.java
new file mode 100644
index 0000000..dd5f409
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueHttpUrlCheck.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+/**
+ * Service class to check the Hue HTTP URL
+ */
+@Path("/huehttpurls")
+
+public class HueHttpUrlCheck {
+
+ @Inject
+ ViewContext view;
+
+ @GET
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response hueHttpUrl() {
+
+ JSONObject response = new JSONObject();
+ try {
+ response.put("huehttpurl", ConfigurationCheckImplementation.checkConfigurationForHue(view.getProperties().get("Hue_URL")));
+ return Response.ok(response).build();
+
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex);
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueWebHdfsCheck.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueWebHdfsCheck.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueWebHdfsCheck.java
new file mode 100644
index 0000000..7b5e7a0
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueWebHdfsCheck.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.net.URISyntaxException;
+
+/**
+ * Service class to check the Hue WebHDFS URL
+ */
+
+@Path("/huewebhdfsurls")
+
+public class HueWebHdfsCheck {
+
+ @Inject
+ ViewContext view;
+
+ @GET
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response hueWebHdfs() throws IOException, URISyntaxException {
+
+ JSONObject response = new JSONObject();
+ try {
+ response.put("huewebhdfsurl", ConfigurationCheckImplementation.checkNamenodeURIConnectionforHue(view.getProperties().get("namenode_URI_Hue")));
+ return Response.ok(response).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsAmbari.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsAmbari.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsAmbari.java
new file mode 100644
index 0000000..cc14385
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsAmbari.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.sql.SQLException;
+
+/**
+ * Service class to fetch Pig instance details
+ */
+
+@Path("/piginstancedetails")
+
+public class PigInstanceDetailsAmbari {
+
+ @Inject
+ ViewContext view;
+
+ @GET
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response instancelist() throws IOException, PropertyVetoException, SQLException {
+
+ PigInstanceDetailsUtility instance = new PigInstanceDetailsUtility();
+
+ JSONObject response = new JSONObject();
+ response.put("piginstancedetails", instance.getInstancedetails(view));
+ return Response.ok(response).build();
+ }
+
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsUtility.java
new file mode 100644
index 0000000..d3cfb97
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsUtility.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.InstanceModel;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.sql.*;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.instancedetail.*;
+
+/**
+ * Utility class to fetch Pig Instance details
+ */
+
+public class PigInstanceDetailsUtility {
+
+ public List<InstanceModel> getInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException {
+
+ List<InstanceModel> instancelist = new ArrayList<>();
+ Connection conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
+ conn.setAutoCommit(false);
+ PreparedStatement prSt;
+ int i = 0;
+
+ QuerySetAmbariDB ambaridatabase = null;
+
+ if (view.getProperties().get("ambaridrivername").contains("mysql")) {
+ ambaridatabase = new MysqlQuerySetAmbariDB();
+ } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
+ ambaridatabase = new PostgressQuerySetAmbariDB();
+ } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+ ambaridatabase = new OracleQuerySetAmbariDB();
+ }
+
+ ResultSet rs1 = null;
+
+ prSt = ambaridatabase.getAllPigInstance(conn);
+
+ rs1 = prSt.executeQuery();
+
+ while (rs1.next()) {
+ InstanceModel instance = new InstanceModel();
+ instance.setInstanceName(rs1.getString(1));
+ instance.setId(i);
+ instancelist.add(instance);
+ i++;
+ }
+ rs1.close();
+ prSt.close();
+ conn.close();
+ return instancelist;
+
+ }
+
+
+}
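The utilities in this patch open the connection, statement, and result set by hand and close only some of them. With Java 7 try-with-resources the same fetch loop releases all three even when the query throws; a sketch, assuming dataSource stands in for the DataSourceAmbariDatabase singleton used above:

List<InstanceModel> instancelist = new ArrayList<>();
try (Connection conn = dataSource.getConnection();
     PreparedStatement prSt = ambaridatabase.getAllPigInstance(conn);
     ResultSet rs = prSt.executeQuery()) {
  int i = 0;
  while (rs.next()) {
    InstanceModel instance = new InstanceModel();
    instance.setInstanceName(rs.getString(1));
    instance.setId(i++);
    instancelist.add(instance);
  }
  // conn, prSt, and rs are closed automatically, in reverse order
}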
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailHue.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailHue.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailHue.java
new file mode 100644
index 0000000..d993f3a
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailHue.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.sql.SQLException;
+
+/**
+ * Service class to fetch user details
+ */
+
+@Path("/usersdetails")
+
+public class UserDetailHue {
+
+ @Inject
+ ViewContext view;
+
+ @GET
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response userList() throws IOException, PropertyVetoException, SQLException {
+
+ UserDetailsUtility user = new UserDetailsUtility();
+
+ JSONObject response = new JSONObject();
+ response.put("usersdetails", user.getUserDetails(view));
+ return Response.ok(response).build();
+ }
+
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailsUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailsUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailsUtility.java
new file mode 100644
index 0000000..09f175b
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailsUtility.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.UserModel;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.sql.*;
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.userdetails.*;
+
+
+public class UserDetailsUtility {
+
+
+ public List<UserModel> getUserDetails(ViewContext view) throws PropertyVetoException, SQLException, IOException {
+
+ List<UserModel> userlist = new ArrayList<>();
+ Connection conn = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();
+ conn.setAutoCommit(false);
+
+ // synthetic "all" entry with id -1 so callers can migrate every user's queries
+ UserModel all = new UserModel();
+ all.setId(-1);
+ all.setUsername("all");
+ userlist.add(all);
+
+ PreparedStatement prSt;
+ ResultSet rs1 = null;
+
+ QuerySet huedatabase = null;
+
+ if (view.getProperties().get("huedrivername").contains("mysql")) {
+ huedatabase = new MysqlQuerySet();
+ } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
+ huedatabase = new PostgressQuerySet();
+ } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
+ huedatabase = new SqliteQuerySet();
+ } else if (view.getProperties().get("huedrivername").contains("oracle")) {
+ huedatabase = new OracleQuerySet();
+ }
+
+ prSt = huedatabase.getUserDetails(conn);
+
+ rs1 = prSt.executeQuery();
+
+ while (rs1.next()) {
+ UserModel user = new UserModel();
+ user.setUsername(rs1.getString(2));
+ user.setId(rs1.getInt(1));
+ userlist.add(user);
+ }
+ rs1.close();
+ prSt.close();
+ conn.close();
+ return userlist;
+
+ }
+
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
new file mode 100755
index 0000000..bdcf293
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
@@ -0,0 +1,255 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.migration.hive.historyquery;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+import org.apache.ambari.view.huetoambarimigration.migration.InitiateJobMigration;
+import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationModel;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset.MysqlQuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset.OracleQuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset.PostgressQuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset.QuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset.*;
+import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+import org.apache.log4j.Logger;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.text.ParseException;
+import java.util.ArrayList;
+
+
+public class HiveHistoryMigrationUtility {
+
+
+ protected MigrationResourceManager resourceManager = null;
+
+ public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
+ if (resourceManager == null) {
+ resourceManager = new MigrationResourceManager(view);
+ }
+ return resourceManager;
+ }
+
+
+ public void hiveHistoryQueryMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
+
+ InitiateJobMigration migrationservice = new InitiateJobMigration();
+
+ long startTime = System.currentTimeMillis();
+
+ final Logger logger = Logger.getLogger(HiveHistoryMigrationUtility.class);
+ Connection connectionHuedb = null;
+ Connection connectionAmbaridb = null;
+
+ logger.info("--------------------------------------");
+ logger.info("hive History query Migration started");
+ logger.info("--------------------------------------");
+ logger.info("start date: " + startDate);
+ logger.info("enddate date: " + endDate);
+ logger.info("instance is: " + username);
+ logger.info("hue username is : " + instance);
+
+ MigrationModel model = new MigrationModel();
+
+ int maxCountOfAmbariDb, i = 0;
+ String time = null;
+ Long epochTime = null;
+ String dirNameforHiveHistroy;
+ ArrayList<HiveModel> dbpojoHiveHistoryQuery = new ArrayList<HiveModel>();
+
+ HiveHistoryQueryMigrationImplementation hiveHistoryQueryImpl = new HiveHistoryQueryMigrationImplementation(); // creating an object of the HiveHistory implementation
+
+ QuerySet huedatabase = null;
+
+ /* instantiating the query set according to the driver name */
+
+ if (view.getProperties().get("huedrivername").contains("mysql")) {
+ huedatabase = new MysqlQuerySet();
+ } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
+ huedatabase = new PostgressQuerySet();
+ } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
+ huedatabase = new SqliteQuerySet();
+ } else if (view.getProperties().get("huedrivername").contains("oracle")) {
+ huedatabase = new OracleQuerySet();
+ }
+
+
+ QuerySetAmbariDB ambaridatabase = null;
+
+
+ if (view.getProperties().get("ambaridrivername").contains("mysql")) {
+ ambaridatabase = new MysqlQuerySetAmbariDB();
+ } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
+ ambaridatabase = new PostgressQuerySetAmbariDB();
+ } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+ ambaridatabase = new OracleQuerySetAmbariDB();
+ }
+
+
+ try {
+
+ connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();
+
+ dbpojoHiveHistoryQuery = hiveHistoryQueryImpl.fetchFromHue(username, startDate, endDate, connectionHuedb, huedatabase);
+
+ for (int j = 0; j < dbpojoHiveHistoryQuery.size(); j++) {
+ logger.info("the query fetched from hue: " + dbpojoHiveHistoryQuery.get(j).getQuery());
+ }
+
+ /* if No migration query selected from Hue Database according to our search criteria */
+
+ if (dbpojoHiveHistoryQuery.size() == 0) {
+ migrationresult.setIsNoQuerySelected("yes");
+ migrationresult.setProgressPercentage(0);
+ migrationresult.setNumberOfQueryTransfered(0);
+ migrationresult.setTotalNoQuery(dbpojoHiveHistoryQuery.size());
+ getResourceManager(view).update(migrationresult, jobid);
+ logger.info("No queries has been selected acccording to your criteria");
+
+ } else {
+ /* If hive queries are selected based on our search criteria */
+
+ connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
+ connectionAmbaridb.setAutoCommit(false);
+
+ // for each queries fetched from Hue database//
+
+ for (i = 0; i < dbpojoHiveHistoryQuery.size(); i++) {
+
+ float calc = ((float) (i + 1)) / dbpojoHiveHistoryQuery.size() * 100;
+ int progressPercentage = Math.round(calc);
+ migrationresult.setIsNoQuerySelected("no");
+ migrationresult.setProgressPercentage(progressPercentage);
+ migrationresult.setNumberOfQueryTransfered(i + 1);
+ migrationresult.setTotalNoQuery(dbpojoHiveHistoryQuery.size());
+ getResourceManager(view).update(migrationresult, jobid);
+
+ logger.info("_____________________");
+ logger.info("Loop No." + (i + 1));
+ logger.info("_____________________");
+ logger.info("Hue query that has been fetched" + dbpojoHiveHistoryQuery.get(i).getQuery());
+ int id = 0;
+
+ id = hiveHistoryQueryImpl.fetchInstanceTablename(connectionAmbaridb, instance, ambaridatabase); // fetching the table id for the given instance name
+
+ logger.info("Table name has been fetched from intance name");
+
+ hiveHistoryQueryImpl.writetoFileQueryhql(dbpojoHiveHistoryQuery.get(i).getQuery(), ConfigurationCheckImplementation.getHomeDir());// writing to .hql file to a temp file on local disk
+
+ logger.info(".hql file created in Temp directory");
+
+ hiveHistoryQueryImpl.writetoFileLogs(ConfigurationCheckImplementation.getHomeDir());// writing to logs file to a temp file on local disk
+
+ logger.info("Log file created in Temp directory");
+
+ maxCountOfAmbariDb = (hiveHistoryQueryImpl.fetchMaximumIdfromAmbaridb(connectionAmbaridb, id, ambaridatabase) + 1);// fetching the maximum count for ambari db to insert
+
+ time = hiveHistoryQueryImpl.getTime();// getting the system current time.
+
+ epochTime = hiveHistoryQueryImpl.getEpochTime();// getting system time as epoch format
+
+ dirNameforHiveHistroy = "/user/admin/migration/jobs/migration-job-" + maxCountOfAmbariDb + "-" + time + "/";// creating the directory name
+
+ logger.info("Directory name where .hql will be saved: " + dirNameforHiveHistroy);
+
+ hiveHistoryQueryImpl.insertRowinAmbaridb(dirNameforHiveHistroy, maxCountOfAmbariDb, epochTime, connectionAmbaridb, id, instance, i, ambaridatabase);// inserting in ambari database
+
+ if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+
+ logger.info("kerberose enabled");
+ hiveHistoryQueryImpl.createDirKerberorisedSecured(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// creating directory in kerborized secured hdfs
+ logger.info("Directory created in hdfs");
+ hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the .hql file to kerborized hdfs
+ hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the log file to kerborized hdfs
+ } else {
+
+ logger.info("kerberose not enabled");
+ hiveHistoryQueryImpl.createDir(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs
+ logger.info("Directory created in hdfs");
+ hiveHistoryQueryImpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the .hql file to hdfs
+ hiveHistoryQueryImpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the log file to hdfs
+ }
+
+ }
+ connectionAmbaridb.commit();
+
+
+ }
+ } catch (SQLException e) {
+ logger.error("Sql exception in ambari database: ", e);
+ try {
+ connectionAmbaridb.rollback();
+ model.setIfSuccess(false);
+ logger.error("Sql statement are Rolledback");
+ } catch (SQLException e1) {
+ logger.error("Sql rollback exception in ambari database", e1);
+ }
+ } catch (ClassNotFoundException e) {
+ logger.error("Class not found :- ", e);
+ } catch (ParseException e) {
+ logger.error("Parse Exception : ", e);
+ } catch (URISyntaxException e) {
+ logger.error("URI Syntax Exception: ", e);
+ } catch (PropertyVetoException e) {
+ logger.error("PropertyVetoException: ", e);
+ } catch (ItemNotFound itemNotFound) {
+ logger.error("Item not found: ", itemNotFound);
+ } finally {
+ if (connectionAmbaridb != null) try {
+ connectionAmbaridb.close();
+ } catch (SQLException e) {
+ logger.error("Exception in closing the connection :", e);
+ }
+ }
+ // deleting the temporary files that were created during execution
+ hiveHistoryQueryImpl.deleteFileQueryhql(ConfigurationCheckImplementation.getHomeDir());
+ hiveHistoryQueryImpl.deleteFileQueryLogs(ConfigurationCheckImplementation.getHomeDir());
+
+ //session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
+ logger.info("------------------------------");
+ logger.info("hive History query Migration Ends");
+ logger.info("------------------------------");
+
+ long stopTime = System.currentTimeMillis();
+ long elapsedTime = stopTime - startTime;
+
+ migrationresult.setJobtype("hivehistoryquerymigration");
+ migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
+ getResourceManager(view).update(migrationresult, jobid);
+
+
+ }
+
+}
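For each migrated query the loop above publishes its position as a rounded percentage via setProgressPercentage. A small worked instance of that arithmetic:

// Progress reporting as computed in the migration loop:
// query 3 of 8 (zero-based index i = 2) reports 38 percent.
int total = 8;
int i = 2;
float calc = ((float) (i + 1)) / total * 100;  // 37.5
int progressPercentage = Math.round(calc);     // 38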
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
new file mode 100644
index 0000000..a0182f6
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
@@ -0,0 +1,551 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.migration.hive.historyquery;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset.QuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset.QuerySet;
+import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
+import org.jdom.Attribute;
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+import org.jdom.input.SAXBuilder;
+import org.jdom.output.Format;
+import org.jdom.output.XMLOutputter;
+
+import java.io.*;
+import java.net.URISyntaxException;
+import java.security.PrivilegedExceptionAction;
+import java.sql.*;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+
+public class HiveHistoryQueryMigrationImplementation {
+
+ static final Logger logger = Logger.getLogger(HiveHistoryQueryMigrationImplementation.class);
+
+ public void writeToAlternateSqlFile(String dirname, String content, String instance, int i) throws IOException {
+
+ Date dNow = new Date();
+ SimpleDateFormat ft = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+ String currentDate = ft.format(dNow);
+
+ XMLOutputter xmlOutput = new XMLOutputter();
+ xmlOutput.setFormat(Format.getPrettyFormat());
+
+ File xmlfile = new File(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml");
+
+ if (xmlfile.exists()) {
+ String iteration = Integer.toString(i + 1);
+ SAXBuilder builder = new SAXBuilder();
+ Document doc;
+ try {
+ doc = (Document) builder.build(xmlfile);
+ Element rootNode = doc.getRootElement();
+ Element record = new Element("RevertRecord");
+ record.setAttribute(new Attribute("id", iteration));
+ record.addContent(new Element("datetime").setText(currentDate.toString()));
+ record.addContent(new Element("dirname").setText(dirname));
+ record.addContent(new Element("instance").setText(instance));
+ record.addContent(new Element("query").setText(content));
+ rootNode.addContent(record);
+ xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
+
+ } catch (JDOMException e) {
+ logger.error("JDOMException", e);
+
+ }
+
+ } else {
+
+ try {
+ String iteration = Integer.toString(i + 1);
+ Element revertrecord = new Element("RevertChangePage");
+ Document doc = new Document(revertrecord);
+ doc.setRootElement(revertrecord);
+
+ Element record = new Element("RevertRecord");
+ record.setAttribute(new Attribute("id", iteration));
+ record.addContent(new Element("datetime").setText(currentDate.toString()));
+ record.addContent(new Element("dirname").setText(dirname));
+ record.addContent(new Element("instance").setText(instance));
+ record.addContent(new Element("query").setText(content));
+ doc.getRootElement().addContent(record);
+ xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
+ } catch (IOException io) {
+ logger.error("JDOMException", io);
+ }
+
+ }
+
+ }
+
+ public int fetchMaximumIdfromAmbaridb(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {
+
+ String ds_id = null;
+ ResultSet rs = null;
+ PreparedStatement prSt = null;
+
+ prSt = ambaridatabase.getMaxDsIdFromTableId(c, id);
+
+ rs = prSt.executeQuery();
+
+ while (rs.next()) {
+ ds_id = rs.getString("max");
+ }
+ rs.close();
+ prSt.close();
+
+ int num;
+ if (ds_id == null) {
+ num = 1;
+ } else {
+ num = Integer.parseInt(ds_id);
+ }
+ return num;
+ }
+
+
+ public void insertRowinAmbaridb(String dirname, int maxcount, long epochtime, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
+
+ String maxcount1 = Integer.toString(maxcount);
+ String epochtime1 = Long.toString(epochtime);
+ PreparedStatement prSt = null;
+ String revsql = null;
+
+ prSt = ambaridatabase.insertToHiveHistory(c, id, maxcount1, epochtime, dirname);
+
+ logger.info("The actual insert statement is " + prSt);
+
+ prSt.executeUpdate();
+
+ revsql = ambaridatabase.RevertSql(id, maxcount1);
+
+ logger.info("adding revert sql hive history");
+
+ writeToAlternateSqlFile(dirname, revsql, instance, i);
+
+
+ }
+
+ public int fetchInstanceTablename(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
+
+ int id = 0;
+ PreparedStatement prSt = null;
+ ResultSet rs = null;
+
+ prSt = ambaridatabase.getTableIdFromInstanceName(c, instance);
+
+ logger.info("sql statement to fetch is from ambari instance:= = " + prSt);
+
+ rs = prSt.executeQuery();
+
+ while (rs.next()) {
+ id = rs.getInt("id");
+ }
+ rs.close();
+ prSt.close();
+ return id;
+ }
+
+ public long getEpochTime() throws ParseException {
+ return System.currentTimeMillis() / 1000L;
+ }
+
+ public String getTime() throws ParseException {
+ GregorianCalendar date = new GregorianCalendar();
+
+ int day = date.get(Calendar.DAY_OF_MONTH);
+ int month = date.get(Calendar.MONTH);
+ int year = date.get(Calendar.YEAR);
+ int hour = date.get(Calendar.HOUR);
+ int minute = date.get(Calendar.MINUTE);
+
+ // timestamp used in the migration directory name, e.g. 2016-6-29_10-35
+ return year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute;
+ }
+
+ public ArrayList<HiveModel> fetchFromHue(String username, String startdate, String endtime, Connection connection, QuerySet huedatabase) throws ClassNotFoundException, SQLException {
+ int id = 0;
+ int i = 0;
+ ArrayList<HiveModel> hiveArrayList = new ArrayList<HiveModel>();
+
+
+ try {
+ connection.setAutoCommit(false);
+ PreparedStatement prSt = null;
+ String query;
+ ResultSet rs;
+ ResultSet rs1 = null;
+
+ // resolve the Hue user id unless all users were requested
+ if (!username.equals("all")) {
+
+ prSt = huedatabase.getUseridfromUserName(connection, username);
+
+ rs = prSt.executeQuery();
+
+ while (rs.next()) {
+ id = rs.getInt("id");
+ }
+ }
+
+ if (startdate.equals("") && endtime.equals("")) {
+ if (username.equals("all")) {
+ prSt = huedatabase.getQueriesNoStartDateNoEndDateAllUser(connection);
+ } else {
+ prSt = huedatabase.getQueriesNoStartDateNoEndDate(connection, id);
+
+ }
+
+ } else if ((startdate.equals("")) && !(endtime.equals(""))) {
+ if (username.equals("all")) {
+ prSt = huedatabase.getQueriesNoStartDateYesEndDateAllUser(connection, endtime);
+ } else {
+ prSt = huedatabase.getQueriesNoStartDateYesEndDate(connection, id, endtime);
+
+ }
+ } else if (!(startdate.equals("")) && (endtime.equals(""))) {
+ if (username.equals("all")) {
+ prSt = huedatabase.getQueriesYesStartDateNoEndDateAllUser(connection, startdate);
+ } else {
+ prSt = huedatabase.getQueriesYesStartDateNoEndDate(connection, id, startdate);
+
+ }
+
+ } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
+ if (username.equals("all")) {
+ prSt = huedatabase.getQueriesYesStartDateYesEndDateAllUser(connection, startdate, endtime);
+ } else {
+ prSt = huedatabase.getQueriesYesStartDateYesEndDate(connection, id, startdate, endtime);
+ }
+ }
+
+ rs1 = prSt.executeQuery();
+
+ while (rs1.next()) {
+ HiveModel hivepojo = new HiveModel();
+ query = rs1.getString("query");
+ hivepojo.setQuery(query);
+ hiveArrayList.add(hivepojo);
+ i++;
+ }
+
+ connection.commit();
+
+ } catch (SQLException e) {
+ connection.rollback();
+ logger.error("SQL exception while fetching queries from Hue: ", e);
+
+ } finally {
+ try {
+ if (connection != null)
+ connection.close();
+ } catch (SQLException e) {
+ logger.error("Sql exception error: " + e);
+ }
+ }
+ return hiveArrayList;
+
+ }
+
+ public void writetoFileQueryhql(String content, String homedir) {
+ try {
+ File file = new File(homedir + "query.hql");
+ // if the file doesn't exist, create it
+ if (!file.exists()) {
+ file.createNewFile();
+ }
+ FileWriter fw = new FileWriter(file.getAbsoluteFile());
+ BufferedWriter bw = new BufferedWriter(fw);
+ bw.write(content);
+ bw.close();
+ } catch (IOException e) {
+ logger.error("IOException", e);
+ }
+
+ }
+
+ public void deleteFileQueryhql(String homedir) {
+ try {
+ File file = new File(homedir + "query.hql");
+
+ if (file.delete()) {
+ logger.info("temporary hql file deleted");
+ } else {
+ logger.info("temporary hql file delete failed");
+ }
+
+ } catch (Exception e) {
+
+ logger.error("File Exception ", e);
+
+ }
+
+ }
+
+ public void deleteFileQueryLogs(String homedir) {
+ try {
+ File file = new File(homedir + "logs");
+
+ if (file.delete()) {
+ logger.info("temporary logs file deleted");
+ } else {
+ logger.info("temporary logs file delete failed");
+ }
+
+ } catch (Exception e) {
+
+ logger.error("File Exception ", e);
+
+ }
+
+ }
+
+ public void writetoFileLogs(String homedir) {
+ try {
+ String content = "";
+ File file = new File(homedir + "logs");
+ // if the file doesn't exist, create it
+ if (!file.exists()) {
+ file.createNewFile();
+ }
+ FileWriter fw = new FileWriter(file.getAbsoluteFile());
+ BufferedWriter bw = new BufferedWriter(fw);
+ bw.write(content);
+ bw.close();
+ } catch (IOException e) {
+ logger.error("IOException", e);
+ }
+
+ }
+
+ public void createDir(final String dir, final String namenodeuri) throws IOException,
+ URISyntaxException {
+
+ try {
+ final Configuration conf = new Configuration();
+
+ conf.set("fs.hdfs.impl",
+ org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+ );
+ conf.set("fs.file.impl",
+ org.apache.hadoop.fs.LocalFileSystem.class.getName()
+ );
+ conf.set("fs.defaultFS", namenodeuri);
+ conf.set("hadoop.job.ugi", "hdfs");
+ UserGroupInformation.setConfiguration(conf);
+
+ UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+ ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+
+ public Boolean run() throws Exception {
+
+ FileSystem fs = FileSystem.get(conf);
+ Path src = new Path(dir);
+ Boolean b = fs.mkdirs(src);
+ return b;
+ }
+ });
+ } catch (Exception e) {
+ logger.error("Exception in Webhdfs", e);
+ }
+ }
+
+ public void createDirKerberorisedSecured(final String dir, final String namenodeuri) throws IOException,
+ URISyntaxException {
+
+ try {
+ final Configuration conf = new Configuration();
+
+ conf.set("fs.hdfs.impl",
+ org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+ );
+ conf.set("fs.file.impl",
+ org.apache.hadoop.fs.LocalFileSystem.class.getName()
+ );
+ conf.set("fs.defaultFS", namenodeuri);
+ conf.set("hadoop.job.ugi", "hdfs");
+ conf.set("hadoop.security.authentication", "Kerberos");
+ UserGroupInformation.setConfiguration(conf);
+ UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+ ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+
+ public Boolean run() throws Exception {
+ FileSystem fs = FileSystem.get(conf);
+ Path src = new Path(dir);
+ Boolean b = fs.mkdirs(src);
+ return b;
+ }
+ });
+ } catch (Exception e) {
+ logger.error("Exception in Webhdfs", e);
+ }
+ }
+
+
+ public void putFileinHdfs(final String source, final String dest, final String namenodeuri)
+ throws IOException {
+
+ try {
+ UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+ ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+ public Void run() throws Exception {
+
+ Configuration conf = new Configuration();
+ conf.set("fs.defaultFS", namenodeuri);
+ conf.set("hadoop.job.ugi", "hdfs");
+ conf.set("fs.hdfs.impl",
+ org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+ );
+ conf.set("fs.file.impl",
+ org.apache.hadoop.fs.LocalFileSystem.class.getName()
+ );
+ FileSystem fileSystem = FileSystem.get(conf);
+
+ String filename = source.substring(
+ source.lastIndexOf('/') + 1, source.length());
+ String dest1;
+ if (dest.charAt(dest.length() - 1) != '/') {
+ dest1 = dest + "/" + filename;
+ } else {
+ dest1 = dest + filename;
+ }
+
+ Path path = new Path(dest1);
+ // FileSystem.create overwrites the destination if it already exists
+ FSDataOutputStream out = fileSystem.create(path);
+
+ InputStream in = new BufferedInputStream(
+ new FileInputStream(new File(source)));
+
+ byte[] b = new byte[1024];
+ int numBytes = 0;
+ while ((numBytes = in.read(b)) > 0) {
+ out.write(b, 0, numBytes);
+ }
+ in.close();
+ out.close();
+ fileSystem.close();
+ return null;
+ }
+ });
+ } catch (Exception e) {
+ logger.error("Webhdfs exception", e);
+ }
+
+ }
+
+ public void putFileinHdfsKerborizedSecured(final String source, final String dest, final String namenodeuri)
+ throws IOException {
+
+ try {
+
+ final Configuration conf = new Configuration();
+
+ conf.set("fs.hdfs.impl",
+ org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+ );
+ conf.set("fs.file.impl",
+ org.apache.hadoop.fs.LocalFileSystem.class.getName()
+ );
+ conf.set("fs.defaultFS", namenodeuri);
+ conf.set("hadoop.job.ugi", "hdfs");
+ conf.set("hadoop.security.authentication", "Kerberos");
+ UserGroupInformation.setConfiguration(conf);
+ UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+ ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+ public Void run() throws Exception {
+
+ FileSystem fileSystem = FileSystem.get(conf);
+
+ String filename = source.substring(
+ source.lastIndexOf('/') + 1, source.length());
+ String dest1;
+ if (dest.charAt(dest.length() - 1) != '/') {
+ dest1 = dest + "/" + filename;
+ } else {
+ dest1 = dest + filename;
+ }
+
+ Path path = new Path(dest1);
+ // FileSystem.create overwrites the destination if it already exists
+ FSDataOutputStream out = fileSystem.create(path);
+
+ InputStream in = new BufferedInputStream(
+ new FileInputStream(new File(source)));
+
+ byte[] b = new byte[1024];
+ int numBytes = 0;
+ while ((numBytes = in.read(b)) > 0) {
+ out.write(b, 0, numBytes);
+ }
+ in.close();
+ out.close();
+ fileSystem.close();
+ return null;
+ }
+ });
+ } catch (Exception e) {
+ logger.error("Webhdfs exception", e);
+
+ }
+
+ }
+
+}
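Both put methods reimplement the upload with a manual buffered byte-copy loop; Hadoop's FileSystem API already provides copyFromLocalFile for this. A hedged alternative sketch under the same remote "hdfs" user assumption as the code above, not a drop-in from this patch:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

import java.security.PrivilegedExceptionAction;

public class HdfsUploadSketch {
  public void putFileInHdfs(final String source, final String dest, final String namenodeuri) throws Exception {
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
    ugi.doAs(new PrivilegedExceptionAction<Void>() {
      public Void run() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", namenodeuri);
        FileSystem fs = FileSystem.get(conf);
        // copyFromLocalFile(delSrc, overwrite, src, dst) keeps the local
        // copy and overwrites the destination, like fileSystem.create()
        fs.copyFromLocalFile(false, true, new Path(source), new Path(dest));
        fs.close();
        return null;
      }
    });
  }
}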
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryStartJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryStartJob.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryStartJob.java
new file mode 100644
index 0000000..5228bf6
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryStartJob.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration.hive.historyquery;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+
+import java.io.IOException;
+
+
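+/**
+ * Worker thread that drives the Hive history-query migration for one job.
+ * A caller would typically construct it with the Hue user name, target view
+ * instance, date range and job id, then invoke start(); run() delegates to
+ * HiveHistoryMigrationUtility and tracks progress in a MigrationResponse.
+ */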
+public class HiveHistoryStartJob extends Thread {
+
+ String username;
+ String instance;
+ String startdate;
+ String enddate;
+ String jobid;
+ ViewContext view;
+
+ public HiveHistoryStartJob(String username, String instance, String startdate, String enddate, String jobid, ViewContext view) {
+ this.username = username;
+ this.instance = instance;
+ this.startdate = startdate;
+ this.enddate = enddate;
+ this.jobid = jobid;
+ this.view = view;
+ }
+
+ @Override
+ public void run() {
+
+ MigrationResponse migrationresult = new MigrationResponse();
+
+ migrationresult.setId(jobid);
+ migrationresult.setIntanceName(instance);
+ migrationresult.setUserNameofhue(username);
+ migrationresult.setProgressPercentage(0);
+
+ // Run the migration on this worker thread; progress is reported through migrationresult.
+ HiveHistoryMigrationUtility hivehistoryquery = new HiveHistoryMigrationUtility();
+ try {
+ hivehistoryquery.hiveHistoryQueryMigration(username, instance, startdate, enddate, view, migrationresult, jobid);
+ } catch (IOException e) {
+ e.printStackTrace();
+ } catch (ItemNotFound itemNotFound) {
+ itemNotFound.printStackTrace();
+ }
+
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4a9c28/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
new file mode 100644
index 0000000..7bd48b2
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
@@ -0,0 +1,673 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery;
+
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.QuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset.QuerySet;
+import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
+import org.jdom.Attribute;
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+import org.jdom.input.SAXBuilder;
+import org.jdom.output.Format;
+import org.jdom.output.XMLOutputter;
+import org.json.JSONObject;
+
+import java.io.*;
+import java.net.URISyntaxException;
+import java.nio.charset.Charset;
+import java.security.PrivilegedExceptionAction;
+import java.sql.*;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+
+public class HiveSavedQueryMigrationImplementation {
+
+ static final Logger logger = Logger.getLogger(HiveSavedQueryMigrationImplementation.class);
+
+ private static String readAll(Reader rd) throws IOException {
+ StringBuilder sb = new StringBuilder();
+ int cp;
+ while ((cp = rd.read()) != -1) {
+ sb.append((char) cp);
+ }
+ return sb.toString();
+ }
+
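+ /**
+ * Appends a revert record (timestamp, directory, instance and the revert
+ * SQL) to RevertChangesService.xml under the migration home directory,
+ * creating the file on first use.
+ */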
+ public void wrtitetoalternatesqlfile(String dirname, String content,
+ String instance, int i) throws IOException {
+
+ Date dNow = new Date();
+ SimpleDateFormat ft = new SimpleDateFormat("YYYY-MM-dd hh:mm:ss");
+ String currentDate = ft.format(dNow);
+
+ XMLOutputter xmlOutput = new XMLOutputter();
+
+ xmlOutput.setFormat(Format.getPrettyFormat());
+
+ File xmlfile = new File(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml");
+
+ if (xmlfile.exists()) {
+ String iteration = Integer.toString(i + 1);
+ SAXBuilder builder = new SAXBuilder();
+ Document doc;
+ try {
+ doc = (Document) builder.build(xmlfile);
+
+ Element rootNode = doc.getRootElement();
+
+ Element record = new Element("RevertRecord");
+ record.setAttribute(new Attribute("id", iteration));
+ record.addContent(new Element("datetime").setText(currentDate.toString()));
+ record.addContent(new Element("dirname").setText(dirname));
+ record.addContent(new Element("instance").setText(instance));
+ record.addContent(new Element("query").setText(content));
+
+ rootNode.addContent(record);
+ xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
+
+ } catch (JDOMException e) {
+ logger.error("JDOMException: ", e);
+ }
+
+ } else {
+
+ try {
+ String iteration = Integer.toString(i + 1);
+ Element revertrecord = new Element("RevertChangePage");
+ Document doc = new Document(revertrecord);
+
+ Element record = new Element("RevertRecord");
+ record.setAttribute(new Attribute("id", iteration));
+ record.addContent(new Element("datetime").setText(currentDate
+ .toString()));
+ record.addContent(new Element("dirname").setText(dirname));
+ record.addContent(new Element("instance").setText(instance));
+ record.addContent(new Element("query").setText(content));
+
+ doc.getRootElement().addContent(record);
+
+ xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
+
+ } catch (IOException io) {
+ logger.error("IOException: ", io);
+ }
+
+ }
+
+ }
+
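+ /**
+ * Reads the "max" ds_id value for the saved-query table identified by id;
+ * returns 1 when no row exists yet.
+ */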
+ public int fetchMaxidforSavedQueryHive(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {
+
+ String ds_id = null;
+ ResultSet rs = null;
+ PreparedStatement prSt = null;
+
+ prSt = ambaridatabase.getMaxDsIdFromTableIdSavedquery(c, id);
+
+ rs = prSt.executeQuery();
+
+ while (rs.next()) {
+ ds_id = rs.getString("max");
+ }
+
+ int num;
+ if (ds_id == null) {
+ num = 1;
+ } else {
+ num = Integer.parseInt(ds_id);
+ }
+ return num;
+ }
+
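+ /**
+ * Looks up the Ambari table id backing the given saved-query view instance.
+ */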
+ public int fetchInstancetablenameForSavedqueryHive(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
+
+ int id = 0;
+ PreparedStatement prSt = null;
+ ResultSet rs = null;
+
+ prSt = ambaridatabase.getTableIdFromInstanceNameSavedquery(c, instance);
+
+ logger.info("SQL statement to fetch the saved-query table id from the Ambari instance: " + prSt);
+
+ rs = prSt.executeQuery();
+
+ while (rs.next()) {
+ id = rs.getInt("id");
+ }
+ return id;
+ }
+
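+ /**
+ * Looks up the Ambari table id backing the given history-query view instance.
+ */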
+ public int fetchInstanceTablenameHiveHistory(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
+ int id = 0;
+ PreparedStatement prSt = null;
+ ResultSet rs = null;
+
+ prSt = ambaridatabase.getTableIdFromInstanceNameHistoryquery(c, instance);
+
+ logger.info("SQL statement to fetch the history-query table id from the Ambari instance: " + prSt);
+
+ rs = prSt.executeQuery();
+
+ while (rs.next()) {
+ id = rs.getInt("id");
+ }
+ return id;
+
+ }
+
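+ /**
+ * Reads the "max" ds_id value for the history-query table identified by id;
+ * returns 1 when no row exists yet.
+ */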
+ public int fetchMaxdsidFromHiveHistory(Connection c, int id, QuerySetAmbariDB ambaridatabase)
+ throws SQLException {
+
+ String ds_id = null;
+ ResultSet rs = null;
+ PreparedStatement prSt = null;
+
+ prSt = ambaridatabase.getMaxDsIdFromTableIdHistoryquery(c, id);
+
+ rs = prSt.executeQuery();
+
+ while (rs.next()) {
+ ds_id = rs.getString("max");
+ }
+
+ int num;
+ if (ds_id == null) {
+ num = 1;
+ } else {
+ num = Integer.parseInt(ds_id);
+ }
+ return num;
+ }
+
+
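+ /**
+ * Inserts one migrated history-query row into the Ambari database and logs
+ * the matching revert SQL through wrtitetoalternatesqlfile().
+ */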
+ public void insertRowHiveHistory(String dirname, int maxcount, long epochtime, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase)
+ throws SQLException, IOException {
+
+ String maxcount1 = Integer.toString(maxcount);
+ PreparedStatement prSt = null;
+ String revsql = null;
+
+ prSt = ambaridatabase.insertToHiveHistory(c, id, maxcount1, epochtime, dirname);
+
+ logger.info("The actual insert statement is " + prSt);
+
+ prSt.executeUpdate();
+
+ revsql = ambaridatabase.revertSqlHistoryQuery(id, maxcount1);
+
+ logger.info("adding revert sqlsavedquery in hivehistory ");
+
+ wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+ }
+
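+ /**
+ * Inserts one migrated saved-query row into the Ambari database and logs
+ * the matching revert SQL through wrtitetoalternatesqlfile().
+ */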
+ public void insertRowinSavedQuery(int maxcount, String database, String dirname, String query, String name, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
+
+ String maxcount1 = Integer.toString(maxcount);
+ String revsql = null;
+
+ PreparedStatement prSt = null;
+
+ prSt = ambaridatabase.insertToHiveSavedQuery(c, id, maxcount1, database, dirname, query, name);
+
+ System.out.println("the actual query is " + prSt);
+
+ logger.info("The actual insert statement is " + prSt);
+
+ prSt.executeUpdate();
+
+ revsql = ambaridatabase.revertSqlSavedQuery(id, maxcount1);
+
+ logger.info("adding revert sqlsavedquery ");
+
+ wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+
+ }
+
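+ /** Current time as seconds since the Unix epoch. */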
+ public long getEpochTime() throws ParseException {
+ return System.currentTimeMillis() / 1000L;
+ }
+
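+ /** Current local time formatted as year-month-day_hour-minute. */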
+ public String getTime() throws ParseException {
+ GregorianCalendar date = new GregorianCalendar();
+
+ int day = date.get(Calendar.DAY_OF_MONTH);
+ int month = date.get(Calendar.MONTH);
+ int year = date.get(Calendar.YEAR);
+ int hour = date.get(Calendar.HOUR);
+ int minute = date.get(Calendar.MINUTE);
+
+ return year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute;
+ }
+
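+ /**
+ * Pulls saved Hive queries out of the Hue database, optionally filtered by
+ * user name and by start/end date, and maps each row to a HiveModel.
+ */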
+ public ArrayList<HiveModel> fetchFromHuedb(String username, String startdate, String endtime, Connection connection, QuerySet huedatabase)
+ throws ClassNotFoundException, IOException {
+ int id = 0;
+ ArrayList<HiveModel> hiveArrayList = new ArrayList<HiveModel>();
+ ResultSet rs1 = null;
+
+ try {
+ connection.setAutoCommit(false);
+ PreparedStatement prSt = null;
+ ResultSet rs;
+
+ // Resolve the Hue user id unless all users were requested.
+ if (!username.equals("all")) {
+ prSt = huedatabase.getUseridfromUserName(connection, username);
+ rs = prSt.executeQuery();
+ while (rs.next()) {
+ id = rs.getInt("id");
+ }
+ }
+
+ if (startdate.equals("") && endtime.equals("")) {
+ if (username.equals("all")) {
+ prSt = huedatabase.getQueriesNoStartDateNoEndDateAllUser(connection);
+ } else {
+ prSt = huedatabase.getQueriesNoStartDateNoEndDate(connection, id);
+
+ }
+
+ } else if ((startdate.equals("")) && !(endtime.equals(""))) {
+ if (username.equals("all")) {
+ prSt = huedatabase.getQueriesNoStartDateYesEndDateAllUser(connection, endtime);
+ } else {
+ prSt = huedatabase.getQueriesNoStartDateYesEndDate(connection, id, endtime);
+
+ }
+ } else if (!(startdate.equals("")) && (endtime.equals(""))) {
+ if (username.equals("all")) {
+ prSt = huedatabase.getQueriesYesStartDateNoEndDateAllUser(connection, startdate);
+ } else {
+ prSt = huedatabase.getQueriesYesStartDateNoEndDate(connection, id, startdate);
+
+ }
+
+ } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
+ if (username.equals("all")) {
+ prSt = huedatabase.getQueriesYesStartDateYesEndDateAllUser(connection, startdate, endtime);
+ } else {
+ prSt = huedatabase.getQueriesYesStartDateYesEndDate(connection, id, startdate, endtime);
+ }
+
+ }
+
+ rs1 = prSt.executeQuery();
+
+
+ while (rs1.next()) {
+ HiveModel hivepojo = new HiveModel();
+ String name = rs1.getString("name");
+ String temp = rs1.getString("data");
+ InputStream is = new ByteArrayInputStream(temp.getBytes());
+ BufferedReader rd = new BufferedReader(new InputStreamReader(is, Charset.forName("UTF-8")));
+ String jsonText = readAll(rd);
+
+ // The "data" column holds JSON whose "query" member is itself a JSON
+ // document carrying the query text and the target database.
+ JSONObject json = new JSONObject(jsonText);
+ JSONObject queryJson = new JSONObject(json.get("query").toString());
+
+ hivepojo.setQuery(queryJson.get("query").toString());
+ hivepojo.setDatabase(queryJson.get("database").toString());
+ hivepojo.setOwner(name);
+ hiveArrayList.add(hivepojo);
+ }
+
+ } catch (SQLException e2) {
+ logger.error("SQLException: ", e2);
+ } finally {
+ try {
+ if (connection != null)
+ connection.close();
+ } catch (SQLException e) {
+ logger.error("sql connection exception", e);
+ }
+ }
+
+ return hiveArrayList;
+
+ }
+
+
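+ /** Writes the query text to a temporary query.hql file under homedir. */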
+ public void writetoFilequeryHql(String content, String homedir) {
+ try {
+ File file = new File(homedir + "query.hql");
+ if (!file.exists()) {
+ file.createNewFile();
+ }
+
+ FileWriter fw = new FileWriter(file.getAbsoluteFile());
+ BufferedWriter bw = new BufferedWriter(fw);
+ bw.write(content);
+ bw.close();
+
+ } catch (IOException e) {
+ logger.error("IOException: ", e);
+ }
+
+ }
+
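+ /** Deletes the temporary query.hql file created during migration. */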
+ public void deleteFileQueryhql(String homedir) {
+ try {
+ File file = new File(homedir + "query.hql");
+
+ if (file.delete()) {
+ logger.info("temporary hql file deleted");
+ } else {
+ logger.info("temporary hql file delete failed");
+ }
+
+ } catch (Exception e) {
+
+ logger.error("File Exception ", e);
+
+ }
+
+ }
+
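+ /** Deletes the temporary logs file created during migration. */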
+ public void deleteFileQueryLogs(String homedir) {
+ try {
+ File file = new File(homedir + "logs");
+
+ if (file.delete()) {
+ logger.info("temporary logs file deleted");
+ } else {
+ logger.info("temporary logs file delete failed");
+ }
+
+ } catch (Exception e) {
+
+ logger.error("File Exception ", e);
+
+ }
+
+ }
+
+
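+ /** Creates (or truncates to empty) the temporary logs file under homedir. */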
+ public void writetoFileLogs(String homedir) {
+ try {
+
+ String content = "";
+ File file = new File(homedir + "logs");
+
+ // create the file if it does not exist
+ if (!file.exists()) {
+ file.createNewFile();
+ }
+
+ FileWriter fw = new FileWriter(file.getAbsoluteFile());
+ BufferedWriter bw = new BufferedWriter(fw);
+ bw.write(content);
+ bw.close();
+
+ } catch (IOException e) {
+ logger.error("IOException: ", e);
+ }
+
+ }
+
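+ /**
+ * Creates the given directory in HDFS as the "hdfs" remote user.
+ */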
+ public void createDirHive(final String dir, final String namenodeuri)
+ throws IOException, URISyntaxException {
+
+ try {
+ final Configuration conf = new Configuration();
+
+ conf.set("fs.hdfs.impl",
+ org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+ );
+ conf.set("fs.file.impl",
+ org.apache.hadoop.fs.LocalFileSystem.class.getName()
+ );
+ conf.set("fs.defaultFS", namenodeuri);
+ conf.set("hadoop.job.ugi", "hdfs");
+ conf.set("hadoop.security.authentication", "Kerberos");
+
+ UserGroupInformation.setConfiguration(conf);
+ UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+ ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+ public Void run() throws Exception {
+
+ FileSystem fs = FileSystem.get(conf);
+ Path src = new Path(dir);
+ fs.mkdirs(src);
+ return null;
+ }
+ });
+ } catch (Exception e) {
+ logger.error("Webhdfs: ", e);
+ }
+ }
+
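+ /**
+ * Secured variant of createDirHive(); note that both variants currently
+ * apply the same Kerberos configuration.
+ */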
+ public void createDirHiveSecured(final String dir, final String namenodeuri)
+ throws IOException, URISyntaxException {
+
+ try {
+ final Configuration conf = new Configuration();
+
+ conf.set("fs.hdfs.impl",
+ org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+ );
+ conf.set("fs.file.impl",
+ org.apache.hadoop.fs.LocalFileSystem.class.getName()
+ );
+ conf.set("fs.defaultFS", namenodeuri);
+ conf.set("hadoop.job.ugi", "hdfs");
+ conf.set("hadoop.security.authentication", "Kerberos");
+
+ UserGroupInformation.setConfiguration(conf);
+ UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+ ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+ public Void run() throws Exception {
+
+ FileSystem fs = FileSystem.get(conf);
+ Path src = new Path(dir);
+ fs.mkdirs(src);
+ return null;
+ }
+ });
+ } catch (Exception e) {
+ logger.error("Webhdfs: ", e);
+ }
+ }
+
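+ /**
+ * Copies a local file into HDFS as the "hdfs" remote user. If dest does not
+ * end in "/", the source file name is appended to it.
+ */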
+ public void putFileinHdfs(final String source, final String dest,
+ final String namenodeuri) throws IOException {
+
+ try {
+ final Configuration conf = new Configuration();
+
+ conf.set("fs.hdfs.impl",
+ org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+ );
+ conf.set("fs.file.impl",
+ org.apache.hadoop.fs.LocalFileSystem.class.getName()
+ );
+ conf.set("fs.defaultFS", namenodeuri);
+ conf.set("hadoop.job.ugi", "hdfs");
+ conf.set("hadoop.security.authentication", "Kerberos");
+
+ UserGroupInformation.setConfiguration(conf);
+ UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+ ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+ public Void run() throws Exception {
+
+ FileSystem fileSystem = FileSystem.get(conf);
+ String filename = source.substring(
+ source.lastIndexOf('/') + 1, source.length());
+ String dest1;
+ if (dest.charAt(dest.length() - 1) != '/') {
+ dest1 = dest + "/" + filename;
+ } else {
+ dest1 = dest + filename;
+ }
+
+ Path path = new Path(dest1);
+ if (fileSystem.exists(path)) {
+
+ }
+ FSDataOutputStream out = fileSystem.create(path);
+
+ InputStream in = new BufferedInputStream(
+ new FileInputStream(new File(source)));
+
+ byte[] b = new byte[1024];
+ int numBytes = 0;
+ while ((numBytes = in.read(b)) > 0) {
+ out.write(b, 0, numBytes);
+ }
+ in.close();
+ out.close();
+ fileSystem.close();
+ return null;
+ }
+ });
+ } catch (Exception e) {
+ logger.error("Webhdfs exception", e);
+ }
+
+ }
+
+
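+ /**
+ * Secured variant of putFileinHdfs(); the two currently share the same
+ * Hadoop configuration and perform the same copy.
+ */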
+ public void putFileinHdfsSecured(final String source, final String dest,
+ final String namenodeuri) throws IOException {
+
+ try {
+ final Configuration conf = new Configuration();
+
+ conf.set("fs.hdfs.impl",
+ org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+ );
+ conf.set("fs.file.impl",
+ org.apache.hadoop.fs.LocalFileSystem.class.getName()
+ );
+ conf.set("fs.defaultFS", namenodeuri);
+ conf.set("hadoop.job.ugi", "hdfs");
+ conf.set("hadoop.security.authentication", "Kerberos");
+
+ UserGroupInformation.setConfiguration(conf);
+ UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+ ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+ public Void run() throws Exception {
+
+
+ FileSystem fileSystem = FileSystem.get(conf);
+
+ String filename = source.substring(
+ source.lastIndexOf('/') + 1, source.length());
+ String dest1;
+ if (dest.charAt(dest.length() - 1) != '/') {
+ dest1 = dest + "/" + filename;
+ } else {
+ dest1 = dest + filename;
+ }
+
+ Path path = new Path(dest1);
+ // FileSystem.create() will simply overwrite any file already at this path.
+ FSDataOutputStream out = fileSystem.create(path);
+
+ InputStream in = new BufferedInputStream(
+ new FileInputStream(new File(source)));
+
+ byte[] b = new byte[1024];
+ int numBytes = 0;
+ while ((numBytes = in.read(b)) > 0) {
+ out.write(b, 0, numBytes);
+ }
+ in.close();
+ out.close();
+ fileSystem.close();
+
+
+ return null;
+ }
+ });
+ } catch (Exception e) {
+ logger.error("Webhdfs exception", e);
+ }
+
+ }
+
+}