You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kylin.apache.org by zh...@apache.org on 2021/07/16 10:16:23 UTC
[kylin] branch kylin-on-parquet-v2 updated: KYLIN-5021 FilePruner
throws NPE when there is no timePartitionColumn in cube (#1680)
This is an automated email from the ASF dual-hosted git repository.
zhangzc pushed a commit to branch kylin-on-parquet-v2
in repository https://gitbox.apache.org/repos/asf/kylin.git
The following commit(s) were added to refs/heads/kylin-on-parquet-v2 by this push:
new 4100e50 KYLIN-5021 FilePruner throws NPE when there is no timePartitionColumn in cube (#1680)
4100e50 is described below
commit 4100e5007624ae29b8e5e97319261064896ea699
Author: tianhui5 <82...@qq.com>
AuthorDate: Fri Jul 16 18:16:13 2021 +0800
KYLIN-5021 FilePruner throws NPE when there is no timePartitionColumn in cube (#1680)
* KYLIN-5021 FilePruner throws NPE when there is no timePartitionColumn in cube
* minor, use isDefined to check None
* optimize performance of file pruner
Co-authored-by: tianhui5 <ti...@xiaomi.com>
Co-authored-by: Congling XIA <xi...@xiaomi.com>
---
.../scala/org/apache/spark/sql/execution/datasource/FilePruner.scala | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/kylin-spark-project/kylin-spark-common/src/main/scala/org/apache/spark/sql/execution/datasource/FilePruner.scala b/kylin-spark-project/kylin-spark-common/src/main/scala/org/apache/spark/sql/execution/datasource/FilePruner.scala
index 2784170..0c5cfbf 100644
--- a/kylin-spark-project/kylin-spark-common/src/main/scala/org/apache/spark/sql/execution/datasource/FilePruner.scala
+++ b/kylin-spark-project/kylin-spark-common/src/main/scala/org/apache/spark/sql/execution/datasource/FilePruner.scala
@@ -295,10 +295,13 @@ class FilePruner(cubeInstance: CubeInstance,
}
private def getSegmentFilter(dataFilters: Seq[Expression], col: Attribute): Seq[Expression] = {
- dataFilters.map(extractSegmentFilter(_, col)).filter(!_.equals(None)).map(_.get)
+ dataFilters.map(extractSegmentFilter(_, col)).filter(_.isDefined).map(_.get)
}
private def extractSegmentFilter(filter: Expression, col: Attribute): Option[Expression] = {
+ if (col == null) {
+ return None
+ }
filter match {
case expressions.Or(left, right) =>
val leftChild = extractSegmentFilter(left, col)