You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by sr...@apache.org on 2015/09/24 02:31:58 UTC
[2/2] ambari git commit: AMBARI-13217. Hive View does not support HS2
configured with LDAP Auth (Pallav Kulshreshtha via srimanth)
AMBARI-13217. Hive View does not support HS2 configured with LDAP Auth (Pallav Kulshreshtha via srimanth)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3f1af535
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3f1af535
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3f1af535
Branch: refs/heads/branch-2.1.2
Commit: 3f1af5357b823da23ad1f05bd8095e7310b3bb36
Parents: 031ba83
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Wed Sep 23 17:30:39 2015 -0700
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Wed Sep 23 17:30:39 2015 -0700
----------------------------------------------------------------------
.../ambari/view/hive/client/Connection.java | 25 +-
.../view/hive/client/ConnectionFactory.java | 19 +-
.../view/hive/client/HiveAuthCredentials.java | 31 ++
.../hive/client/HiveAuthRequiredException.java | 27 ++
.../client/HiveClientAuthRequiredException.java | 25 +
.../view/hive/client/IConnectionFactory.java | 24 -
.../view/hive/client/UserLocalConnection.java | 45 ++
.../client/UserLocalHiveAuthCredentials.java | 33 ++
.../resources/browser/HiveBrowserService.java | 34 +-
.../browser/HiveBrowserService.java.orig | 282 +++++++++++
.../view/hive/resources/jobs/JobService.java | 59 ++-
.../hive/resources/jobs/JobService.java.orig | 476 +++++++++++++++++++
.../jobs/OperationHandleController.java | 22 +-
.../jobs/OperationHandleControllerFactory.java | 12 +-
.../jobs/viewJobs/JobControllerFactory.java | 1 -
.../jobs/viewJobs/JobControllerImpl.java | 5 +-
.../view/hive/utils/SharedObjectsFactory.java | 27 +-
.../ui/hive-web/app/controllers/databases.js | 48 ++
.../ui/hive-web/app/controllers/index.js | 50 ++
.../ui/hive-web/app/controllers/modal-save.js | 1 +
.../ui/hive-web/app/initializers/i18n.js | 3 +
.../ui/hive-web/app/routes/application.js | 1 +
.../ui/hive-web/app/services/database.js | 2 +-
.../ui/hive-web/app/templates/modal-save.hbs | 2 +-
.../apache/ambari/view/hive/BaseHiveTest.java | 21 +-
.../ambari/view/hive/client/ConnectionTest.java | 63 +++
.../hive/resources/jobs/JobLDAPServiceTest.java | 158 ++++++
.../hive/resources/jobs/JobServiceTest.java | 3 +-
.../org/apache/ambari/view/utils/UserLocal.java | 129 +++++
.../ambari/view/utils/UserLocalFactory.java | 27 ++
.../ambari/view/utils/ViewUserLocalTest.java | 6 +-
31 files changed, 1547 insertions(+), 114 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
index c30b269..d93df24 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
@@ -43,7 +43,7 @@ import java.util.List;
import java.util.Map;
/**
- * Holds session
+ * Holds sessions
*/
public class Connection {
private final static Logger LOG =
@@ -59,12 +59,15 @@ public class Connection {
private DDLDelegator ddl;
private String username;
+ private String password;
- public Connection(String host, int port, Map<String, String> authParams, String username) throws HiveClientException {
+ public Connection(String host, int port, Map<String, String> authParams, String username, String password)
+ throws HiveClientException, HiveAuthRequiredException {
this.host = host;
this.port = port;
this.authParams = authParams;
this.username = username;
+ this.password = password;
this.sessHandles = new HashMap<String, TSessionHandle>();
@@ -76,7 +79,7 @@ public class Connection {
return ddl;
}
- public synchronized void openConnection() throws HiveClientException {
+ public synchronized void openConnection() throws HiveClientException, HiveAuthRequiredException {
try {
transport = getTransport();
transport.open();
@@ -94,7 +97,7 @@ public class Connection {
* @return transport
* @throws HiveClientException
*/
- protected TTransport getTransport() throws HiveClientException, TTransportException {
+ protected TTransport getTransport() throws HiveClientException, TTransportException, HiveAuthRequiredException {
TTransport transport;
boolean assumeSubject =
Utils.HiveAuthenticationParams.AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT.equals(authParams
@@ -133,7 +136,7 @@ public class Connection {
} else {
// we are using PLAIN Sasl connection with user/password
String userName = getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_USER, getUsername());
- String passwd = getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_PASSWD, Utils.HiveAuthenticationParams.ANONYMOUS_USER);
+ String passwd = getPassword();
// Note: Thrift returns an SSL socket that is already bound to the specified host:port
// Therefore an open called on this would be a no-op later
// Hence, any TTransportException related to connecting with the peer are thrown here.
@@ -168,6 +171,18 @@ public class Connection {
return transport;
}
+ private String getPassword() throws HiveAuthRequiredException {
+ String password = getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_PASSWD, Utils.HiveAuthenticationParams.ANONYMOUS_USER);
+ if (password.equals("${ask_password}")) {
+ if (this.password == null) {
+ throw new HiveAuthRequiredException();
+ } else {
+ password = this.password;
+ }
+ }
+ return password;
+ }
+
private boolean isSslConnection() {
return "true".equalsIgnoreCase(authParams.get(Utils.HiveAuthenticationParams.USE_SSL));
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
index 5a4406a..d0f9c8b 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
@@ -21,6 +21,7 @@ package org.apache.ambari.view.hive.client;
import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.hive.utils.HiveClientFormattedException;
import org.apache.ambari.view.hive.utils.ServiceFormattedException;
+import org.apache.ambari.view.utils.UserLocalFactory;
import org.apache.ambari.view.utils.ambari.AmbariApi;
import org.apache.ambari.view.utils.ambari.AmbariApiException;
import org.slf4j.Logger;
@@ -31,22 +32,24 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-public class ConnectionFactory implements IConnectionFactory {
+public class ConnectionFactory implements UserLocalFactory<Connection> {
private final static Logger LOG =
LoggerFactory.getLogger(ConnectionFactory.class);
private ViewContext context;
+ private HiveAuthCredentials credentials;
private AmbariApi ambariApi;
- public ConnectionFactory(ViewContext context) {
+ public ConnectionFactory(ViewContext context, HiveAuthCredentials credentials) {
this.context = context;
+ this.credentials = credentials;
this.ambariApi = new AmbariApi(context);
}
@Override
- public Connection getHiveConnection() {
+ public Connection create() {
try {
return new Connection(getHiveHost(), Integer.valueOf(getHivePort()),
- getHiveAuthParams(), context.getUsername());
+ getHiveAuthParams(), context.getUsername(), getCredentials().getPassword());
} catch (HiveClientException e) {
throw new HiveClientFormattedException(e);
}
@@ -91,4 +94,12 @@ public class ConnectionFactory implements IConnectionFactory {
}
return params;
}
+
+ public HiveAuthCredentials getCredentials() {
+ return credentials;
+ }
+
+ public void setCredentials(HiveAuthCredentials credentials) {
+ this.credentials = credentials;
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveAuthCredentials.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveAuthCredentials.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveAuthCredentials.java
new file mode 100644
index 0000000..2b3f43b
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveAuthCredentials.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+public class HiveAuthCredentials {
+ private String password;
+
+ public String getPassword() {
+ return password;
+ }
+
+ public void setPassword(String password) {
+ this.password = password;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveAuthRequiredException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveAuthRequiredException.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveAuthRequiredException.java
new file mode 100644
index 0000000..ac15f2f
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveAuthRequiredException.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+import org.apache.ambari.view.hive.utils.ServiceFormattedException;
+
+public class HiveAuthRequiredException extends ServiceFormattedException {
+ public HiveAuthRequiredException() {
+ super("Hive Password Required", null, 401);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientAuthRequiredException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientAuthRequiredException.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientAuthRequiredException.java
new file mode 100644
index 0000000..955bdf9
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientAuthRequiredException.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+public class HiveClientAuthRequiredException extends Exception {
+ public HiveClientAuthRequiredException(String comment, Exception ex) {
+ super(comment + ((ex == null)?"":(": " + ex.toString())), ex);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/IConnectionFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/IConnectionFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/IConnectionFactory.java
deleted file mode 100644
index 7a3df06..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/IConnectionFactory.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-
-public interface IConnectionFactory {
- Connection getHiveConnection();
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/UserLocalConnection.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/UserLocalConnection.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/UserLocalConnection.java
new file mode 100644
index 0000000..c80a4c4
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/UserLocalConnection.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.utils.UserLocal;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class UserLocalConnection extends UserLocal<Connection> {
+ protected final static Logger LOG =
+ LoggerFactory.getLogger(UserLocalConnection.class);
+
+ public UserLocalConnection() {
+ super(Connection.class);
+ }
+
+ private UserLocal<HiveAuthCredentials> authCredentialsLocal =
+ new UserLocalHiveAuthCredentials();
+
+ @Override
+ protected synchronized Connection initialValue(ViewContext context) {
+ ConnectionFactory hiveConnectionFactory = new ConnectionFactory(context, authCredentialsLocal.get(context));
+ authCredentialsLocal.remove(context); // we should not store credentials in memory,
+ // password is erased after connection established
+ return hiveConnectionFactory.create();
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/UserLocalHiveAuthCredentials.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/UserLocalHiveAuthCredentials.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/UserLocalHiveAuthCredentials.java
new file mode 100644
index 0000000..9c72863
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/UserLocalHiveAuthCredentials.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.utils.UserLocal;
+
+public class UserLocalHiveAuthCredentials extends UserLocal<HiveAuthCredentials> {
+ public UserLocalHiveAuthCredentials() {
+ super(HiveAuthCredentials.class);
+ }
+
+ @Override
+ protected synchronized HiveAuthCredentials initialValue(ViewContext context) {
+ return new HiveAuthCredentials();
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java
index 55919a7..f758fe3 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java
@@ -22,7 +22,7 @@ import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.ViewResourceHandler;
import org.apache.ambari.view.hive.client.ColumnDescription;
import org.apache.ambari.view.hive.client.Cursor;
-import org.apache.ambari.view.hive.client.IConnectionFactory;
+import org.apache.ambari.view.hive.client.UserLocalConnection;
import org.apache.ambari.view.hive.resources.jobs.ResultsPaginationController;
import org.apache.ambari.view.hive.utils.BadRequestFormattedException;
import org.apache.ambari.view.hive.utils.ServiceFormattedException;
@@ -56,7 +56,7 @@ public class HiveBrowserService {
private static final long EXPIRING_TIME = 10*60*1000; // 10 minutes
private static Map<String, Cursor> resultsCache;
- private IConnectionFactory connectionFactory;
+ private UserLocalConnection connectionLocal = new UserLocalConnection();
public static Map<String, Cursor> getResultsCache() {
if (resultsCache == null) {
@@ -67,12 +67,6 @@ public class HiveBrowserService {
return resultsCache;
}
- private IConnectionFactory getConnectionFactory() {
- if (connectionFactory == null)
- connectionFactory = new SharedObjectsFactory(context);
- return new SharedObjectsFactory(context);
- }
-
/**
* Returns list of databases
*/
@@ -90,8 +84,8 @@ public class HiveBrowserService {
String curl = null;
try {
JSONObject response = new JSONObject();
- TSessionHandle session = getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
- List<String> tables = getConnectionFactory().getHiveConnection().ddl().getDBList(session, like);
+ TSessionHandle session = connectionLocal.get(context).getOrCreateSessionByTag("DDL");
+ List<String> tables = connectionLocal.get(context).ddl().getDBList(session, like);
response.put("databases", tables);
return Response.ok(response).build();
} catch (WebApplicationException ex) {
@@ -127,8 +121,8 @@ public class HiveBrowserService {
new Callable<Cursor>() {
@Override
public Cursor call() throws Exception {
- TSessionHandle session = getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
- return getConnectionFactory().getHiveConnection().ddl().getDBListCursor(session, finalLike);
+ TSessionHandle session = connectionLocal.get(context).getOrCreateSessionByTag("DDL");
+ return connectionLocal.get(context).ddl().getDBListCursor(session, finalLike);
}
}).build();
} catch (WebApplicationException ex) {
@@ -158,8 +152,8 @@ public class HiveBrowserService {
String curl = null;
try {
JSONObject response = new JSONObject();
- TSessionHandle session = getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
- List<String> tables = getConnectionFactory().getHiveConnection().ddl().getTableList(session, db, like);
+ TSessionHandle session = connectionLocal.get(context).getOrCreateSessionByTag("DDL");
+ List<String> tables = connectionLocal.get(context).ddl().getTableList(session, db, like);
response.put("tables", tables);
response.put("database", db);
return Response.ok(response).build();
@@ -197,8 +191,8 @@ public class HiveBrowserService {
new Callable<Cursor>() {
@Override
public Cursor call() throws Exception {
- TSessionHandle session = getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
- Cursor cursor = getConnectionFactory().getHiveConnection().ddl().getTableListCursor(session, db, finalLike);
+ TSessionHandle session = connectionLocal.get(context).getOrCreateSessionByTag("DDL");
+ Cursor cursor = connectionLocal.get(context).ddl().getTableListCursor(session, db, finalLike);
cursor.selectColumns(requestedColumns);
return cursor;
}
@@ -227,8 +221,8 @@ public class HiveBrowserService {
String curl = null;
try {
JSONObject response = new JSONObject();
- TSessionHandle session = getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
- List<ColumnDescription> columnDescriptions = getConnectionFactory().getHiveConnection().ddl()
+ TSessionHandle session = connectionLocal.get(context).getOrCreateSessionByTag("DDL");
+ List<ColumnDescription> columnDescriptions = connectionLocal.get(context).ddl()
.getTableDescription(session, db, table, like, extendedTableDescription);
response.put("columns", columnDescriptions);
response.put("database", db);
@@ -264,8 +258,8 @@ public class HiveBrowserService {
new Callable<Cursor>() {
@Override
public Cursor call() throws Exception {
- TSessionHandle session = getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
- Cursor cursor = getConnectionFactory().getHiveConnection().ddl().
+ TSessionHandle session = connectionLocal.get(context).getOrCreateSessionByTag("DDL");
+ Cursor cursor = connectionLocal.get(context).ddl().
getTableDescriptionCursor(session, db, table, like);
cursor.selectColumns(requestedColumns);
return cursor;
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java.orig
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java.orig b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java.orig
new file mode 100644
index 0000000..55919a7
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java.orig
@@ -0,0 +1,282 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.browser;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.ViewResourceHandler;
+import org.apache.ambari.view.hive.client.ColumnDescription;
+import org.apache.ambari.view.hive.client.Cursor;
+import org.apache.ambari.view.hive.client.IConnectionFactory;
+import org.apache.ambari.view.hive.resources.jobs.ResultsPaginationController;
+import org.apache.ambari.view.hive.utils.BadRequestFormattedException;
+import org.apache.ambari.view.hive.utils.ServiceFormattedException;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
+import org.apache.commons.collections4.map.PassiveExpiringMap;
+import org.apache.hive.service.cli.thrift.TSessionHandle;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.ws.rs.*;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+
+/**
+ * Database access resource
+ */
+public class HiveBrowserService {
+ @Inject
+ ViewResourceHandler handler;
+ @Inject
+ protected ViewContext context;
+
+ protected final static Logger LOG =
+ LoggerFactory.getLogger(HiveBrowserService.class);
+
+ private static final long EXPIRING_TIME = 10*60*1000; // 10 minutes
+ private static Map<String, Cursor> resultsCache;
+ private IConnectionFactory connectionFactory;
+
+ public static Map<String, Cursor> getResultsCache() {
+ if (resultsCache == null) {
+ PassiveExpiringMap<String, Cursor> resultsCacheExpiringMap =
+ new PassiveExpiringMap<String, Cursor>(EXPIRING_TIME);
+ resultsCache = Collections.synchronizedMap(resultsCacheExpiringMap);
+ }
+ return resultsCache;
+ }
+
+ private IConnectionFactory getConnectionFactory() {
+ if (connectionFactory == null)
+ connectionFactory = new SharedObjectsFactory(context);
+ return new SharedObjectsFactory(context);
+ }
+
+ /**
+ * Returns list of databases
+ */
+ @GET
+ @Path("database")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response databases(@QueryParam("like")String like,
+ @QueryParam("first") String fromBeginning,
+ @QueryParam("count") Integer count,
+ @QueryParam("columns") final String requestedColumns) {
+ if (like == null)
+ like = "*";
+ else
+ like = "*" + like + "*";
+ String curl = null;
+ try {
+ JSONObject response = new JSONObject();
+ TSessionHandle session = getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
+ List<String> tables = getConnectionFactory().getHiveConnection().ddl().getDBList(session, like);
+ response.put("databases", tables);
+ return Response.ok(response).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (IllegalArgumentException ex) {
+ throw new BadRequestFormattedException(ex.getMessage(), ex);
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex, curl);
+ }
+ }
+
+ /**
+ * Returns list of databases
+ */
+ @GET
+ @Path("database.page")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response databasesPaginated(@QueryParam("like")String like,
+ @QueryParam("first") String fromBeginning,
+ @QueryParam("count") Integer count,
+ @QueryParam("searchId") String searchId,
+ @QueryParam("format") String format,
+ @QueryParam("columns") final String requestedColumns) {
+ if (like == null)
+ like = "*";
+ else
+ like = "*" + like + "*";
+ String curl = null;
+ try {
+ final String finalLike = like;
+ return ResultsPaginationController.getInstance(context)
+ .request("databases", searchId, false, fromBeginning, count, format,
+ new Callable<Cursor>() {
+ @Override
+ public Cursor call() throws Exception {
+ TSessionHandle session = getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
+ return getConnectionFactory().getHiveConnection().ddl().getDBListCursor(session, finalLike);
+ }
+ }).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (IllegalArgumentException ex) {
+ throw new BadRequestFormattedException(ex.getMessage(), ex);
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex, curl);
+ }
+ }
+
+ /**
+ * Returns list of databases
+ */
+ @GET
+ @Path("database/{db}/table")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response tablesInDatabase(@PathParam("db") String db,
+ @QueryParam("like")String like,
+ @QueryParam("first") String fromBeginning,
+ @QueryParam("count") Integer count,
+ @QueryParam("columns") final String requestedColumns) {
+ if (like == null)
+ like = "*";
+ else
+ like = "*" + like + "*";
+ String curl = null;
+ try {
+ JSONObject response = new JSONObject();
+ TSessionHandle session = getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
+ List<String> tables = getConnectionFactory().getHiveConnection().ddl().getTableList(session, db, like);
+ response.put("tables", tables);
+ response.put("database", db);
+ return Response.ok(response).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (IllegalArgumentException ex) {
+ throw new BadRequestFormattedException(ex.getMessage(), ex);
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex, curl);
+ }
+ }
+
+ /**
+ * Returns list of databases
+ */
+ @GET
+ @Path("database/{db}/table.page")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response tablesInDatabasePaginated(@PathParam("db") final String db,
+ @QueryParam("like")String like,
+ @QueryParam("first") String fromBeginning,
+ @QueryParam("count") Integer count,
+ @QueryParam("searchId") String searchId,
+ @QueryParam("format") String format,
+ @QueryParam("columns") final String requestedColumns) {
+ if (like == null)
+ like = "*";
+ else
+ like = "*" + like + "*";
+ String curl = null;
+ try {
+ final String finalLike = like;
+ return ResultsPaginationController.getInstance(context)
+ .request(db + ":tables", searchId, false, fromBeginning, count, format,
+ new Callable<Cursor>() {
+ @Override
+ public Cursor call() throws Exception {
+ TSessionHandle session = getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
+ Cursor cursor = getConnectionFactory().getHiveConnection().ddl().getTableListCursor(session, db, finalLike);
+ cursor.selectColumns(requestedColumns);
+ return cursor;
+ }
+ }).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (IllegalArgumentException ex) {
+ throw new BadRequestFormattedException(ex.getMessage(), ex);
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex, curl);
+ }
+ }
+
+ /**
+ * Returns description (columns) of a table
+ */
+ @GET
+ @Path("database/{db}/table/{table}")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response describeTable(@PathParam("db") String db,
+ @PathParam("table") String table,
+ @QueryParam("like") String like,
+ @QueryParam("columns") String requestedColumns,
+ @QueryParam("extended") String extended) {
+ boolean extendedTableDescription = (extended != null && extended.equals("true"));
+ String curl = null;
+ try {
+ JSONObject response = new JSONObject();
+ TSessionHandle session = getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
+ List<ColumnDescription> columnDescriptions = getConnectionFactory().getHiveConnection().ddl()
+ .getTableDescription(session, db, table, like, extendedTableDescription);
+ response.put("columns", columnDescriptions);
+ response.put("database", db);
+ response.put("table", table);
+ return Response.ok(response).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (IllegalArgumentException ex) {
+ throw new BadRequestFormattedException(ex.getMessage(), ex);
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex, curl);
+ }
+ }
+
+ /**
+ * Returns paginated description of a table
+ */
+ @GET
+ @Path("database/{db}/table/{table}.page")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response describeTablePaginated(@PathParam("db") final String db,
+ @PathParam("table") final String table,
+ @QueryParam("like") final String like,
+ @QueryParam("first") String fromBeginning,
+ @QueryParam("searchId") String searchId,
+ @QueryParam("count") Integer count,
+ @QueryParam("format") String format,
+ @QueryParam("columns") final String requestedColumns) {
+ String curl = null;
+ try {
+ return ResultsPaginationController.getInstance(context)
+ .request(db + ":tables:" + table + ":columns", searchId, false, fromBeginning, count, format,
+ new Callable<Cursor>() {
+ @Override
+ public Cursor call() throws Exception {
+ TSessionHandle session = getConnectionFactory().getHiveConnection().getOrCreateSessionByTag("DDL");
+ Cursor cursor = getConnectionFactory().getHiveConnection().ddl().
+ getTableDescriptionCursor(session, db, table, like);
+ cursor.selectColumns(requestedColumns);
+ return cursor;
+ }
+ }).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (IllegalArgumentException ex) {
+ throw new BadRequestFormattedException(ex.getMessage(), ex);
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex, curl);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
index ad46e33..a2c5f5e 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
@@ -21,9 +21,7 @@ package org.apache.ambari.view.hive.resources.jobs;
import org.apache.ambari.view.ViewResourceHandler;
import org.apache.ambari.view.hive.BaseService;
import org.apache.ambari.view.hive.backgroundjobs.BackgroundJobController;
-import org.apache.ambari.view.hive.client.Connection;
-import org.apache.ambari.view.hive.client.Cursor;
-import org.apache.ambari.view.hive.client.HiveClientException;
+import org.apache.ambari.view.hive.client.*;
import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
import org.apache.ambari.view.hive.resources.jobs.atsJobs.IATSParser;
import org.apache.ambari.view.hive.resources.jobs.viewJobs.*;
@@ -60,8 +58,10 @@ public class JobService extends BaseService {
@Inject
ViewResourceHandler handler;
- protected JobResourceManager resourceManager;
+ private JobResourceManager resourceManager;
private IOperationHandleResourceManager opHandleResourceManager;
+ private UserLocalConnection connectionLocal = new UserLocalConnection();
+
protected final static Logger LOG =
LoggerFactory.getLogger(JobService.class);
private Aggregator aggregator;
@@ -422,13 +422,53 @@ public class JobService extends BaseService {
}
/**
+ * Set password and connect to Hive
+ */
+ @POST
+ @Path("auth")
+ @Consumes(MediaType.APPLICATION_JSON)
+ public Response setupPassword(AuthRequest request) {
+ try {
+ HiveAuthCredentials authCredentials = new HiveAuthCredentials();
+ authCredentials.setPassword(request.password);
+ new UserLocalHiveAuthCredentials().set(authCredentials, context);
+
+ connectionLocal.remove(context); // force reconnect on next get
+ connectionLocal.get(context);
+ return Response.ok().status(200).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex);
+ }
+ }
+
+ /**
+ * Remove connection credentials
+ */
+ @DELETE
+ @Path("auth")
+ public Response removePassword() {
+ try {
+ new UserLocalHiveAuthCredentials().remove(context);
+ connectionLocal.remove(context); // force reconnect on next get
+ return Response.ok().status(200).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex);
+ }
+ }
+
+
+ /**
* Invalidate session
*/
@DELETE
@Path("sessions/{sessionTag}")
public Response invalidateSession(@PathParam("sessionTag") String sessionTag) {
try {
- Connection connection = getSharedObjectsFactory().getHiveConnection();
+ Connection connection = connectionLocal.get(context);
connection.invalidateSessionByTag(sessionTag);
return Response.ok().build();
} catch (WebApplicationException ex) {
@@ -446,7 +486,7 @@ public class JobService extends BaseService {
@Produces(MediaType.APPLICATION_JSON)
public Response sessionStatus(@PathParam("sessionTag") String sessionTag) {
try {
- Connection connection = getSharedObjectsFactory().getHiveConnection();
+ Connection connection = connectionLocal.get(context);
JSONObject session = new JSONObject();
session.put("sessionTag", sessionTag);
@@ -473,4 +513,11 @@ public class JobService extends BaseService {
public static class JobRequest {
public JobImpl job;
}
+
+ /**
+ * Wrapper for authentication json mapping
+ */
+ public static class AuthRequest {
+ public String password;
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java.orig
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java.orig b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java.orig
new file mode 100644
index 0000000..ad46e33
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java.orig
@@ -0,0 +1,476 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs;
+
+import org.apache.ambari.view.ViewResourceHandler;
+import org.apache.ambari.view.hive.BaseService;
+import org.apache.ambari.view.hive.backgroundjobs.BackgroundJobController;
+import org.apache.ambari.view.hive.client.Connection;
+import org.apache.ambari.view.hive.client.Cursor;
+import org.apache.ambari.view.hive.client.HiveClientException;
+import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.IATSParser;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.*;
+import org.apache.ambari.view.hive.utils.*;
+import org.apache.commons.beanutils.PropertyUtils;
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVPrinter;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.*;
+import javax.ws.rs.core.*;
+import java.io.*;
+import java.lang.reflect.InvocationTargetException;
+import java.util.*;
+import java.util.concurrent.Callable;
+
+/**
+ * Servlet for queries
+ * API:
+ * GET /:id
+ * read job
+ * POST /
+ * create new job
+ * Required: title, queryFile
+ * GET /
+ * get all Jobs of current user
+ */
+public class JobService extends BaseService {
+ @Inject
+ ViewResourceHandler handler;
+
+ protected JobResourceManager resourceManager;
+ private IOperationHandleResourceManager opHandleResourceManager;
+ protected final static Logger LOG =
+ LoggerFactory.getLogger(JobService.class);
+ private Aggregator aggregator;
+
+ protected synchronized JobResourceManager getResourceManager() {
+ if (resourceManager == null) {
+ SharedObjectsFactory connectionsFactory = getSharedObjectsFactory();
+ resourceManager = new JobResourceManager(connectionsFactory, context);
+ }
+ return resourceManager;
+ }
+
+ protected IOperationHandleResourceManager getOperationHandleResourceManager() {
+ if (opHandleResourceManager == null) {
+ opHandleResourceManager = new OperationHandleResourceManager(getSharedObjectsFactory());
+ }
+ return opHandleResourceManager;
+ }
+
+ protected Aggregator getAggregator() {
+ if (aggregator == null) {
+ IATSParser atsParser = getSharedObjectsFactory().getATSParser();
+ aggregator = new Aggregator(getResourceManager(), getOperationHandleResourceManager(), atsParser);
+ }
+ return aggregator;
+ }
+
+ protected void setAggregator(Aggregator aggregator) {
+ this.aggregator = aggregator;
+ }
+
+ /**
+ * Get single item
+ */
+ @GET
+ @Path("{jobId}")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response getOne(@PathParam("jobId") String jobId) {
+ try {
+ JobController jobController = getResourceManager().readController(jobId);
+
+ JSONObject jsonJob = jsonObjectFromJob(jobController);
+
+ return Response.ok(jsonJob).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (ItemNotFound itemNotFound) {
+ throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex);
+ }
+ }
+
+ private JSONObject jsonObjectFromJob(JobController jobController) throws IllegalAccessException, NoSuchMethodException, InvocationTargetException {
+ Job hiveJob = jobController.getJobPOJO();
+
+ Job mergedJob;
+ try {
+ mergedJob = getAggregator().readATSJob(hiveJob);
+ } catch (ItemNotFound itemNotFound) {
+ throw new ServiceFormattedException("E010 Job not found", itemNotFound);
+ }
+ Map createdJobMap = PropertyUtils.describe(mergedJob);
+ createdJobMap.remove("class"); // no need to show Bean class on client
+
+ JSONObject jobJson = new JSONObject();
+ jobJson.put("job", createdJobMap);
+ return jobJson;
+ }
+
+ /**
+ * Get job results in csv format
+ */
+ @GET
+ @Path("{jobId}/results/csv")
+ @Produces("text/csv")
+ public Response getResultsCSV(@PathParam("jobId") String jobId,
+ @Context HttpServletResponse response,
+ @QueryParam("fileName") String fileName,
+ @QueryParam("columns") final String requestedColumns) {
+ try {
+ JobController jobController = getResourceManager().readController(jobId);
+ final Cursor resultSet = jobController.getResults();
+ resultSet.selectColumns(requestedColumns);
+
+ StreamingOutput stream = new StreamingOutput() {
+ @Override
+ public void write(OutputStream os) throws IOException, WebApplicationException {
+ Writer writer = new BufferedWriter(new OutputStreamWriter(os));
+ CSVPrinter csvPrinter = new CSVPrinter(writer, CSVFormat.DEFAULT);
+ try {
+
+ try {
+ csvPrinter.printRecord(resultSet.getHeadersRow().getRow());
+ } catch (HiveClientException e) {
+ LOG.error("Error on reading results header", e);
+ }
+
+ while (resultSet.hasNext()) {
+ csvPrinter.printRecord(resultSet.next().getRow());
+ writer.flush();
+ }
+ } finally {
+ writer.close();
+ }
+ }
+ };
+
+ if (fileName == null || fileName.isEmpty()) {
+ fileName = "results.csv";
+ }
+
+ return Response.ok(stream).
+ header("Content-Disposition", String.format("attachment; filename=\"%s\"", fileName)).
+ build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (ItemNotFound itemNotFound) {
+ throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex);
+ }
+ }
+
+ /**
+ * Get job results in csv format
+ */
+ @GET
+ @Path("{jobId}/results/csv/saveToHDFS")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response getResultsToHDFS(@PathParam("jobId") String jobId,
+ @QueryParam("commence") String commence,
+ @QueryParam("file") final String targetFile,
+ @QueryParam("stop") final String stop,
+ @QueryParam("columns") final String requestedColumns,
+ @Context HttpServletResponse response) {
+ try {
+ final JobController jobController = getResourceManager().readController(jobId);
+
+ String backgroundJobId = "csv" + String.valueOf(jobController.getJob().getId());
+ if (commence != null && commence.equals("true")) {
+ if (targetFile == null)
+ throw new MisconfigurationFormattedException("targetFile should not be empty");
+ BackgroundJobController.getInstance(context).startJob(String.valueOf(backgroundJobId), new Runnable() {
+ @Override
+ public void run() {
+
+ try {
+ Cursor resultSet = jobController.getResults();
+ resultSet.selectColumns(requestedColumns);
+
+ FSDataOutputStream stream = getSharedObjectsFactory().getHdfsApi().create(targetFile, true);
+ Writer writer = new BufferedWriter(new OutputStreamWriter(stream));
+ CSVPrinter csvPrinter = new CSVPrinter(writer, CSVFormat.DEFAULT);
+ try {
+ while (resultSet.hasNext() && !Thread.currentThread().isInterrupted()) {
+ csvPrinter.printRecord(resultSet.next().getRow());
+ writer.flush();
+ }
+ } finally {
+ writer.close();
+ }
+ stream.close();
+
+ } catch (IOException e) {
+ throw new ServiceFormattedException("F010 Could not write CSV to HDFS for job#" + jobController.getJob().getId(), e);
+ } catch (InterruptedException e) {
+ throw new ServiceFormattedException("F010 Could not write CSV to HDFS for job#" + jobController.getJob().getId(), e);
+ } catch (ItemNotFound itemNotFound) {
+ throw new NotFoundFormattedException("E020 Job results are expired", itemNotFound);
+ }
+
+ }
+ });
+ }
+
+ if (stop != null && stop.equals("true")) {
+ BackgroundJobController.getInstance(context).interrupt(backgroundJobId);
+ }
+
+ JSONObject object = new JSONObject();
+ object.put("stopped", BackgroundJobController.getInstance(context).isInterrupted(backgroundJobId));
+ object.put("jobId", jobController.getJob().getId());
+ object.put("backgroundJobId", backgroundJobId);
+ object.put("operationType", "CSV2HDFS");
+ object.put("status", BackgroundJobController.getInstance(context).state(backgroundJobId).toString());
+
+ return Response.ok(object).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (ItemNotFound itemNotFound) {
+ throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex);
+ }
+ }
+
+ /**
+ * Get next results page
+ */
+ @GET
+ @Path("{jobId}/results")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response getResults(@PathParam("jobId") String jobId,
+ @QueryParam("first") String fromBeginning,
+ @QueryParam("count") Integer count,
+ @QueryParam("searchId") String searchId,
+ @QueryParam("format") String format,
+ @QueryParam("columns") final String requestedColumns) {
+ try {
+ final JobController jobController = getResourceManager().readController(jobId);
+ if (!jobController.hasResults()) {
+ return ResultsPaginationController.emptyResponse().build();
+ }
+
+ return ResultsPaginationController.getInstance(context)
+ .request(jobId, searchId, true, fromBeginning, count, format,
+ new Callable<Cursor>() {
+ @Override
+ public Cursor call() throws Exception {
+ Cursor cursor = jobController.getResults();
+ cursor.selectColumns(requestedColumns);
+ return cursor;
+ }
+ }).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (ItemNotFound itemNotFound) {
+ throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex);
+ }
+ }
+
+ /**
+ * Renew expiration time for results
+ */
+ @GET
+ @Path("{jobId}/results/keepAlive")
+ public Response keepAliveResults(@PathParam("jobId") String jobId,
+ @QueryParam("first") String fromBeginning,
+ @QueryParam("count") Integer count) {
+ try {
+ if (!ResultsPaginationController.getInstance(context).keepAlive(jobId, ResultsPaginationController.DEFAULT_SEARCH_ID)) {
+ throw new NotFoundFormattedException("Results already expired", null);
+ }
+ return Response.ok().build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex);
+ }
+ }
+
+ /**
+ * Get progress info
+ */
+ @GET
+ @Path("{jobId}/progress")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response getProgress(@PathParam("jobId") String jobId) {
+ try {
+ final JobController jobController = getResourceManager().readController(jobId);
+
+ ProgressRetriever.Progress progress = new ProgressRetriever(jobController.getJob(), getSharedObjectsFactory()).
+ getProgress();
+
+ return Response.ok(progress).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (ItemNotFound itemNotFound) {
+ throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex);
+ }
+ }
+
+ /**
+ * Delete single item
+ */
+ @DELETE
+ @Path("{id}")
+ public Response delete(@PathParam("id") String id,
+ @QueryParam("remove") final String remove) {
+ try {
+ JobController jobController;
+ try {
+ jobController = getResourceManager().readController(id);
+ } catch (ItemNotFound itemNotFound) {
+ throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+ }
+ jobController.cancel();
+ if (remove != null && remove.compareTo("true") == 0) {
+ getResourceManager().delete(id);
+ }
+ return Response.status(204).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (ItemNotFound itemNotFound) {
+ throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex);
+ }
+ }
+
+ /**
+ * Get all Jobs
+ */
+ @GET
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response getList() {
+ try {
+ LOG.debug("Getting all job");
+ List<Job> allJobs = getAggregator().readAll(context.getUsername());
+ for(Job job : allJobs) {
+ job.setSessionTag(null);
+ }
+
+ JSONObject object = new JSONObject();
+ object.put("jobs", allJobs);
+ return Response.ok(object).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex);
+ }
+ }
+
+ /**
+ * Create job
+ */
+ @POST
+ @Consumes(MediaType.APPLICATION_JSON)
+ public Response create(JobRequest request, @Context HttpServletResponse response,
+ @Context UriInfo ui) {
+ try {
+ Map jobInfo = PropertyUtils.describe(request.job);
+ Job job = new JobImpl(jobInfo);
+ getResourceManager().create(job);
+
+ JobController createdJobController = getResourceManager().readController(job.getId());
+ createdJobController.submit();
+ getResourceManager().saveIfModified(createdJobController);
+
+ response.setHeader("Location",
+ String.format("%s/%s", ui.getAbsolutePath().toString(), job.getId()));
+
+ JSONObject jobObject = jsonObjectFromJob(createdJobController);
+
+ return Response.ok(jobObject).status(201).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (ItemNotFound itemNotFound) {
+ throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex);
+ }
+ }
+
+ /**
+ * Invalidate session
+ */
+ @DELETE
+ @Path("sessions/{sessionTag}")
+ public Response invalidateSession(@PathParam("sessionTag") String sessionTag) {
+ try {
+ Connection connection = getSharedObjectsFactory().getHiveConnection();
+ connection.invalidateSessionByTag(sessionTag);
+ return Response.ok().build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex);
+ }
+ }
+
+ /**
+ * Session status
+ */
+ @GET
+ @Path("sessions/{sessionTag}")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response sessionStatus(@PathParam("sessionTag") String sessionTag) {
+ try {
+ Connection connection = getSharedObjectsFactory().getHiveConnection();
+
+ JSONObject session = new JSONObject();
+ session.put("sessionTag", sessionTag);
+ try {
+ connection.getSessionByTag(sessionTag);
+ session.put("actual", true);
+ } catch (HiveClientException ex) {
+ session.put("actual", false);
+ }
+
+ JSONObject status = new JSONObject();
+ status.put("session", session);
+ return Response.ok(status).build();
+ } catch (WebApplicationException ex) {
+ throw ex;
+ } catch (Exception ex) {
+ throw new ServiceFormattedException(ex.getMessage(), ex);
+ }
+ }
+
+ /**
+ * Wrapper object for json mapping
+ */
+ public static class JobRequest {
+ public JobImpl job;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleController.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleController.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleController.java
index 8156c29..faf02b0 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleController.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleController.java
@@ -19,10 +19,10 @@
package org.apache.ambari.view.hive.resources.jobs;
+import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.hive.client.Cursor;
import org.apache.ambari.view.hive.client.HiveClientException;
-import org.apache.ambari.view.hive.client.IConnectionFactory;
-import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive.client.UserLocalConnection;
import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
import org.apache.ambari.view.hive.utils.HiveClientFormattedException;
import org.apache.hive.service.cli.thrift.TGetOperationStatusResp;
@@ -33,14 +33,16 @@ import org.slf4j.LoggerFactory;
public class OperationHandleController {
private final static Logger LOG =
LoggerFactory.getLogger(OperationHandleController.class);
- private final IConnectionFactory connectionsFabric;
-
private final TOperationHandle operationHandle;
+ private ViewContext context;
private final StoredOperationHandle storedOperationHandle;
private final IOperationHandleResourceManager operationHandlesStorage;
- public OperationHandleController(IConnectionFactory connectionsFabric, StoredOperationHandle storedOperationHandle, IOperationHandleResourceManager operationHandlesStorage) {
- this.connectionsFabric = connectionsFabric;
+ protected UserLocalConnection connectionLocal = new UserLocalConnection();
+
+ public OperationHandleController(ViewContext context, StoredOperationHandle storedOperationHandle,
+ IOperationHandleResourceManager operationHandlesStorage) {
+ this.context = context;
this.storedOperationHandle = storedOperationHandle;
this.operationHandle = storedOperationHandle.toTOperationHandle();
this.operationHandlesStorage = operationHandlesStorage;
@@ -51,7 +53,7 @@ public class OperationHandleController {
}
public OperationStatus getOperationStatus() throws NoOperationStatusSetException, HiveClientException {
- TGetOperationStatusResp statusResp = connectionsFabric.getHiveConnection().getOperationStatus(operationHandle);
+ TGetOperationStatusResp statusResp = connectionLocal.get(context).getOperationStatus(operationHandle);
if (!statusResp.isSetOperationState()) {
throw new NoOperationStatusSetException();
@@ -95,7 +97,7 @@ public class OperationHandleController {
public void cancel() {
try {
- connectionsFabric.getHiveConnection().cancelOperation(operationHandle);
+ connectionLocal.get(context).cancelOperation(operationHandle);
} catch (HiveClientException e) {
throw new HiveClientFormattedException(e);
}
@@ -108,7 +110,7 @@ public class OperationHandleController {
public String getLogs() {
String logs;
try {
- logs = connectionsFabric.getHiveConnection().getLogs(operationHandle);
+ logs = connectionLocal.get(context).getLogs(operationHandle);
} catch (HiveClientFormattedException ex) {
logs = "";
LOG.info(String.format("Logs are not available yet for job #%s [%s]\n%s",
@@ -118,7 +120,7 @@ public class OperationHandleController {
}
public Cursor getResults() {
- return connectionsFabric.getHiveConnection().getResults(operationHandle);
+ return connectionLocal.get(context).getResults(operationHandle);
}
public boolean hasResults() {
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleControllerFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleControllerFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleControllerFactory.java
index 4297078..fe1b01a 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleControllerFactory.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleControllerFactory.java
@@ -18,22 +18,22 @@
package org.apache.ambari.view.hive.resources.jobs;
+import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
-import org.apache.hive.service.cli.thrift.TOperationHandle;
public class OperationHandleControllerFactory {
- private SharedObjectsFactory connectionsFabric;
private IOperationHandleResourceManager operationHandlesStorage;
+ private ViewContext context;
- public OperationHandleControllerFactory(SharedObjectsFactory connectionsFabric) {
- this.connectionsFabric = connectionsFabric;
- operationHandlesStorage = new OperationHandleResourceManager(connectionsFabric);
+ public OperationHandleControllerFactory(ViewContext context, SharedObjectsFactory storageFactory) {
+ this.context = context;
+ this.operationHandlesStorage = new OperationHandleResourceManager(storageFactory);
}
public OperationHandleController createControllerForHandle(StoredOperationHandle storedOperationHandle) {
- return new OperationHandleController(connectionsFabric, storedOperationHandle, operationHandlesStorage);
+ return new OperationHandleController(context, storedOperationHandle, operationHandlesStorage);
}
public OperationHandleController getHandleForJob(Job job) throws ItemNotFound {
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerFactory.java
index 12d1cdb..a2790d1 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerFactory.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerFactory.java
@@ -33,7 +33,6 @@ public class JobControllerFactory implements IJobControllerFactory {
@Override
public JobController createControllerForJob(Job job) {
return new JobControllerImpl(context, job,
- sharedObjectsFactory.getHiveConnectionController(),
sharedObjectsFactory.getOperationHandleControllerFactory(),
sharedObjectsFactory.getSavedQueryResourceManager(),
sharedObjectsFactory.getATSParser(),
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerImpl.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerImpl.java
index 32d64ec..3739523 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerImpl.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerImpl.java
@@ -58,7 +58,6 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
* Warning: Create JobControllers ONLY using JobControllerFactory!
*/
public JobControllerImpl(ViewContext context, Job job,
- ConnectionController hiveConnection,
OperationHandleControllerFactory opHandleControllerFactory,
SavedQueryResourceManager savedQueryResourceManager,
IATSParser atsParser,
@@ -66,10 +65,12 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
this.context = context;
setJobPOJO(job);
this.opHandleControllerFactory = opHandleControllerFactory;
- this.hiveConnection = hiveConnection;
this.savedQueryResourceManager = savedQueryResourceManager;
this.atsParser = atsParser;
this.hdfsApi = hdfsApi;
+
+ UserLocalConnection connectionLocal = new UserLocalConnection();
+ this.hiveConnection = new ConnectionController(opHandleControllerFactory, connectionLocal.get(context));
}
public String getQueryForJob() {
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/SharedObjectsFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/SharedObjectsFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/SharedObjectsFactory.java
index df67985..bb49400 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/SharedObjectsFactory.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/SharedObjectsFactory.java
@@ -20,12 +20,9 @@ package org.apache.ambari.view.hive.utils;
import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.hive.client.Connection;
-import org.apache.ambari.view.hive.client.ConnectionFactory;
-import org.apache.ambari.view.hive.client.IConnectionFactory;
import org.apache.ambari.view.hive.persistence.IStorageFactory;
import org.apache.ambari.view.hive.persistence.Storage;
import org.apache.ambari.view.hive.persistence.utils.StorageFactory;
-import org.apache.ambari.view.hive.resources.jobs.ConnectionController;
import org.apache.ambari.view.hive.resources.jobs.OperationHandleControllerFactory;
import org.apache.ambari.view.hive.resources.jobs.atsJobs.ATSParser;
import org.apache.ambari.view.hive.resources.jobs.atsJobs.ATSParserFactory;
@@ -48,12 +45,11 @@ import java.util.Map;
* e.g. user 'admin' using view instance 'HIVE1' will use one connection, another user
* will use different connection.
*/
-public class SharedObjectsFactory implements IStorageFactory, IConnectionFactory {
+public class SharedObjectsFactory implements IStorageFactory {
protected final static Logger LOG =
LoggerFactory.getLogger(SharedObjectsFactory.class);
private ViewContext context;
- private final IConnectionFactory hiveConnectionFactory;
private final IStorageFactory storageFactory;
private final ATSParserFactory atsParserFactory;
private final RMParserFactory rmParserFactory;
@@ -62,14 +58,12 @@ public class SharedObjectsFactory implements IStorageFactory, IConnectionFactory
public SharedObjectsFactory(ViewContext context) {
this.context = context;
- this.hiveConnectionFactory = new ConnectionFactory(context);
this.storageFactory = new StorageFactory(context);
this.atsParserFactory = new ATSParserFactory(context);
this.rmParserFactory = new RMParserFactory(context);
synchronized (localObjects) {
if (localObjects.size() == 0) {
- localObjects.put(Connection.class, new HashMap<String, Object>());
localObjects.put(OperationHandleControllerFactory.class, new HashMap<String, Object>());
localObjects.put(Storage.class, new HashMap<String, Object>());
localObjects.put(IJobControllerFactory.class, new HashMap<String, Object>());
@@ -81,28 +75,11 @@ public class SharedObjectsFactory implements IStorageFactory, IConnectionFactory
}
}
- /**
- * Returns Connection object specific to unique tag
- * @return Hdfs business delegate object
- */
- @Override
- public Connection getHiveConnection() {
- if (!localObjects.get(Connection.class).containsKey(getTagName())) {
- Connection newConnection = hiveConnectionFactory.getHiveConnection();
- localObjects.get(Connection.class).put(getTagName(), newConnection);
- }
- return (Connection) localObjects.get(Connection.class).get(getTagName());
- }
-
- public ConnectionController getHiveConnectionController() {
- return new ConnectionController(getOperationHandleControllerFactory(), getHiveConnection());
- }
-
// =============================
public OperationHandleControllerFactory getOperationHandleControllerFactory() {
if (!localObjects.get(OperationHandleControllerFactory.class).containsKey(getTagName()))
- localObjects.get(OperationHandleControllerFactory.class).put(getTagName(), new OperationHandleControllerFactory(this));
+ localObjects.get(OperationHandleControllerFactory.class).put(getTagName(), new OperationHandleControllerFactory(context, this));
return (OperationHandleControllerFactory) localObjects.get(OperationHandleControllerFactory.class).get(getTagName());
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/databases.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/databases.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/databases.js
index e6bc7f1..8dff5a8 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/databases.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/databases.js
@@ -155,6 +155,12 @@ export default Ember.Controller.extend({
self.set('isLoading');
}).catch(function (error) {
self._handleError(error);
+
+ if(error.status == 401) {
+ self.send('passwordLDAPDB');
+ }
+
+
});
}.on('init'),
@@ -163,6 +169,48 @@ export default Ember.Controller.extend({
this.getDatabases();
},
+ passwordLDAPDB: function(){
+ var self = this,
+ defer = Ember.RSVP.defer();
+
+ self.getDatabases = this.getDatabases;
+
+ this.send('openModal', 'modal-save', {
+ heading: "modals.authenticationLDAP.heading",
+ text:"",
+ type: "password",
+ defer: defer
+ });
+
+ defer.promise.then(function (text) {
+ // make a post call with the given ldap password.
+ var password = text;
+ var pathName = window.location.pathname;
+ var pathNameArray = pathName.split("/");
+ var hiveViewVersion = pathNameArray[3];
+ var hiveViewName = pathNameArray[4];
+ var ldapAuthURL = "/api/v1/views/HIVE/versions/"+ hiveViewVersion + "/instances/" + hiveViewName + "/jobs/auth";
+
+ $.ajax({
+ url: ldapAuthURL,
+ dataType: "json",
+ type: 'post',
+ headers: {'X-Requested-With': 'XMLHttpRequest', 'X-Requested-By': 'ambari'},
+ contentType: 'application/json',
+ data: JSON.stringify({ "password" : password}),
+ success: function( data, textStatus, jQxhr ){
+ console.log( "LDAP done: " + data );
+ self.getDatabases();
+ },
+ error: function( jqXhr, textStatus, errorThrown ){
+ console.log( "LDAP fail: " + errorThrown );
+ self.get('notifyService').error( "Wrong Credentials." );
+ }
+ });
+
+ });
+ },
+
loadSampleData: function (tableName, database) {
var self = this;
this.send('addQuery', Ember.I18n.t('titles.tableSample', { tableName: tableName }));
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js
index 2519df1..735d2c5 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js
@@ -227,6 +227,11 @@ export default Ember.Controller.extend({
self.set('jobSaveSucceeded');
originalModel.set('isRunning', undefined);
defer.reject(err);
+
+ if(err.status == 401) {
+ self.send('passwordLDAP', job, originalModel);
+ }
+
};
job.save().then(function () {
@@ -477,6 +482,51 @@ export default Ember.Controller.extend({
}.observes('model', 'model.status'),
actions: {
+ passwordLDAP: function(){
+ var job = arguments[0],
+ originalModel = arguments[1],
+ self = this,
+ defer = Ember.RSVP.defer();
+
+ self.createJob = this.createJob;
+
+ this.send('openModal', 'modal-save', {
+ heading: "modals.authenticationLDAP.heading",
+ text:"",
+ type: "password",
+ defer: defer
+ });
+
+ defer.promise.then(function (text) {
+ // make a post call with the given ldap password.
+ var password = text;
+ var pathName = window.location.pathname;
+ var pathNameArray = pathName.split("/");
+ var hiveViewVersion = pathNameArray[3];
+ var hiveViewName = pathNameArray[4];
+ var ldapAuthURL = "/api/v1/views/HIVE/versions/"+ hiveViewVersion + "/instances/" + hiveViewName + "/jobs/auth";
+
+
+ $.ajax({
+ url: ldapAuthURL,
+ dataType: "json",
+ type: 'post',
+ headers: {'X-Requested-With': 'XMLHttpRequest', 'X-Requested-By': 'ambari'},
+ contentType: 'application/json',
+ data: JSON.stringify({ "password" : password}),
+ success: function( data, textStatus, jQxhr ){
+ console.log( "LDAP done: " + data );
+ self.createJob (job,originalModel);
+ },
+ error: function( jqXhr, textStatus, errorThrown ){
+ console.log( "LDAP fail: " + errorThrown );
+ self.get('notifyService').error( "Wrong Credentials." );
+ }
+ });
+
+ });
+ },
+
stopCurrentJob: function () {
this.get('jobService').stopJob(this.get('model'));
},
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/modal-save.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/modal-save.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/modal-save.js
index 6dc1349..6c16291 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/modal-save.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/modal-save.js
@@ -23,6 +23,7 @@ export default Ember.Controller.extend({
save: function () {
this.send('closeModal');
this.defer.resolve(this.get('text'));
+ this.defer.resolve(this.get('type'));
},
close: function () {
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
index 8f9f72d..5ae9b7e 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
@@ -98,6 +98,9 @@ TRANSLATIONS = {
changeTitle: {
heading: 'Rename worksheet'
+ },
+ authenticationLDAP: {
+ heading: 'Enter the LDAP password'
}
},
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/application.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/application.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/application.js
index 11b93a1..916b7b3 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/application.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/application.js
@@ -39,6 +39,7 @@ export default Ember.Route.extend({
message: options.message,
heading: options.heading,
text: options.text,
+ type: options.type || "text",
defer: options.defer
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/resources/ui/hive-web/app/services/database.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/services/database.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/services/database.js
index 6b4df6f..58789a3 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/services/database.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/services/database.js
@@ -224,4 +224,4 @@ export default Ember.Service.extend({
return defer.promise;
}
-});
\ No newline at end of file
+});
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/modal-save.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/modal-save.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/modal-save.hbs
index f327e3b..8d83f2b 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/modal-save.hbs
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/modal-save.hbs
@@ -17,5 +17,5 @@
}}
{{#modal-widget heading=heading close="close" ok="save"}}
- {{input type="text" class="form-control" value=text }}
+ {{input type=type class="form-control" value=text }}
{{/modal-widget}}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java
index 2b915f0..25db721 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java
@@ -58,7 +58,6 @@ public abstract class BaseHiveTest {
@Before
public void setUp() throws Exception {
handler = createNiceMock(ViewResourceHandler.class);
- context = createNiceMock(ViewContext.class);
properties = new HashMap<String, String>();
baseDir = new File(DATA_DIRECTORY)
@@ -66,19 +65,31 @@ public abstract class BaseHiveTest {
hiveStorageFile = new File("./target/HiveTest/storage.dat")
.getAbsoluteFile();
+ setupDefaultContextProperties(properties);
+ setupProperties(properties, baseDir);
+
+ context = makeContext(properties, "ambari-qa", "MyHive");
+
+ replay(handler, context);
+ }
+
+ public void setupDefaultContextProperties(Map<String, String> properties) {
properties.put("dataworker.storagePath", hiveStorageFile.toString());
properties.put("scripts.dir", "/tmp/.hiveQueries");
properties.put("jobs.dir", "/tmp/.hiveJobs");
properties.put("yarn.ats.url", "127.0.0.1:8188");
properties.put("yarn.resourcemanager.url", "127.0.0.1:8088");
+ }
+ public ViewContext makeContext(Map<String, String> properties, String username, String instanceName) throws Exception {
+ setupDefaultContextProperties(properties);
setupProperties(properties, baseDir);
+ ViewContext context = createNiceMock(ViewContext.class);
expect(context.getProperties()).andReturn(properties).anyTimes();
- expect(context.getUsername()).andReturn("ambari-qa").anyTimes();
- expect(context.getInstanceName()).andReturn("MyHive").anyTimes();
-
- replay(handler, context);
+ expect(context.getUsername()).andReturn(username).anyTimes();
+ expect(context.getInstanceName()).andReturn(instanceName).anyTimes();
+ return context;
}
protected void setupProperties(Map<String, String> properties, File baseDir) throws Exception {
http://git-wip-us.apache.org/repos/asf/ambari/blob/3f1af535/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/ConnectionTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/ConnectionTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/ConnectionTest.java
new file mode 100644
index 0000000..ba37dc7
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/ConnectionTest.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.util.HashMap;
+
+import static org.junit.Assert.*;
+
+public class ConnectionTest {
+ @Rule
+ public ExpectedException thrown = ExpectedException.none();
+
+ @Test
+ public void testOpenConnection() throws Exception {
+ HashMap<String, String> auth = new HashMap<String, String>();
+ auth.put("auth", "NONE");
+
+ thrown.expect(HiveClientException.class);
+ thrown.expectMessage("Connection refused");
+ new Connection("nonexistent.host.com", 10000, auth, "ambari-qa", null);
+ }
+
+ @Test
+ public void testAskPasswordWithoutPassword() throws Exception {
+ HashMap<String, String> auth = new HashMap<String, String>();
+ auth.put("auth", "NONE");
+ auth.put("password", "${ask_password}");
+
+ thrown.expect(HiveAuthRequiredException.class);
+ new Connection("nonexistent.host.com", 10000, auth, "ambari-qa", null);
+ }
+
+ @Test
+ public void testAskPasswordWithPassword() throws Exception {
+ HashMap<String, String> auth = new HashMap<String, String>();
+ auth.put("auth", "NONE");
+ auth.put("password", "${ask_password}");
+
+ thrown.expect(HiveClientException.class);
+ thrown.expectMessage("Connection refused");
+ new Connection("nonexistent.host.com", 10000, auth, "ambari-qa", "password");
+ }
+}