You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by li...@apache.org on 2018/01/30 22:11:09 UTC
spark git commit: [SPARK-23275][SQL] hive/tests have been failing
when run locally on the laptop (Mac) with OOM
Repository: spark
Updated Branches:
refs/heads/master 31c00ad8b -> 58fcb5a95
[SPARK-23275][SQL] hive/tests have been failing when run locally on the laptop (Mac) with OOM
## What changes were proposed in this pull request?
hive tests have been failing when they are run locally (macOS) after a recent change in the trunk. After running the tests for some time, the test fails with an OOM error: unable to create new native thread.
I noticed the thread count goes all the way up to 2000+ after which we start getting these OOM errors. Most of the threads seem to be related to the connection pool in the hive metastore (BoneCP-xxxxx-xxxx). This behaviour change is happening after we made the following change to HiveClientImpl.reset()
```scala
def reset(): Unit = withHiveState {
try {
// code
} finally {
runSqlHive("USE default") ===> this is causing the issue
}
```
I am proposing to temporarily back out part of a fix made to address SPARK-23000 to resolve this issue while we work out the exact reason for this sudden increase in thread counts.
## How was this patch tested?
Ran hive/test multiple times on different machines.
(If this patch involves UI changes, please attach a screenshot; otherwise, remove this)
Please review http://spark.apache.org/contributing.html before opening a pull request.
Author: Dilip Biswal <db...@us.ibm.com>
Closes #20441 from dilipbiswal/hive_tests.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/58fcb5a9
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/58fcb5a9
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/58fcb5a9
Branch: refs/heads/master
Commit: 58fcb5a95ee0b91300138cd23f3ce2165fab597f
Parents: 31c00ad
Author: Dilip Biswal <db...@us.ibm.com>
Authored: Tue Jan 30 14:11:06 2018 -0800
Committer: gatorsmile <ga...@gmail.com>
Committed: Tue Jan 30 14:11:06 2018 -0800
----------------------------------------------------------------------
.../spark/sql/hive/client/HiveClientImpl.scala | 26 +++++++++-----------
1 file changed, 11 insertions(+), 15 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/58fcb5a9/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index 39d8390..6c0f414 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -825,23 +825,19 @@ private[hive] class HiveClientImpl(
}
def reset(): Unit = withHiveState {
- try {
- client.getAllTables("default").asScala.foreach { t =>
- logDebug(s"Deleting table $t")
- val table = client.getTable("default", t)
- client.getIndexes("default", t, 255).asScala.foreach { index =>
- shim.dropIndex(client, "default", t, index.getIndexName)
- }
- if (!table.isIndexTable) {
- client.dropTable("default", t)
- }
+ client.getAllTables("default").asScala.foreach { t =>
+ logDebug(s"Deleting table $t")
+ val table = client.getTable("default", t)
+ client.getIndexes("default", t, 255).asScala.foreach { index =>
+ shim.dropIndex(client, "default", t, index.getIndexName)
}
- client.getAllDatabases.asScala.filterNot(_ == "default").foreach { db =>
- logDebug(s"Dropping Database: $db")
- client.dropDatabase(db, true, false, true)
+ if (!table.isIndexTable) {
+ client.dropTable("default", t)
}
- } finally {
- runSqlHive("USE default")
+ }
+ client.getAllDatabases.asScala.filterNot(_ == "default").foreach { db =>
+ logDebug(s"Dropping Database: $db")
+ client.dropDatabase(db, true, false, true)
}
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org