You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2021/02/26 12:33:42 UTC
[spark] branch master updated: [SPARK-34554][SQL] Implement the
copy() method in ColumnarMap
This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new c1beb16 [SPARK-34554][SQL] Implement the copy() method in ColumnarMap
c1beb16 is described below
commit c1beb16cc8db9f61f1b86b5bfa4cd4d603c9b990
Author: Max Gekk <ma...@gmail.com>
AuthorDate: Fri Feb 26 21:33:14 2021 +0900
[SPARK-34554][SQL] Implement the copy() method in ColumnarMap
### What changes were proposed in this pull request?
Implement `ColumnarMap.copy()` by using the `copy()` method of `ColumnarArray`.
### Why are the changes needed?
To eliminate `java.lang.UnsupportedOperationException` while using `ColumnarMap`.
### Does this PR introduce _any_ user-facing change?
Yes. `ColumnarMap.copy()` no longer throws `java.lang.UnsupportedOperationException`; it now returns a `MapData` copy backed by `ArrayBasedMapData`.
### How was this patch tested?
By running new tests in `ColumnarBatchSuite`.
Closes #31663 from MaxGekk/columnar-map-copy.
Authored-by: Max Gekk <ma...@gmail.com>
Signed-off-by: HyukjinKwon <gu...@apache.org>
---
.../java/org/apache/spark/sql/vectorized/ColumnarMap.java | 5 +++--
.../spark/sql/execution/vectorized/ColumnarBatchSuite.scala | 12 +++++++++++-
2 files changed, 14 insertions(+), 3 deletions(-)
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarMap.java b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarMap.java
index 35648e3..6b3d518 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarMap.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarMap.java
@@ -17,6 +17,7 @@
package org.apache.spark.sql.vectorized;
+import org.apache.spark.sql.catalyst.util.ArrayBasedMapData;
import org.apache.spark.sql.catalyst.util.MapData;
/**
@@ -47,7 +48,7 @@ public final class ColumnarMap extends MapData {
}
@Override
- public ColumnarMap copy() {
- throw new UnsupportedOperationException();
+ public MapData copy() {
+ return new ArrayBasedMapData(keys.copy(), values.copy());
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/vectorized/ColumnarBatchSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/vectorized/ColumnarBatchSuite.scala
index a369b2d..bd69bab 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/vectorized/ColumnarBatchSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/vectorized/ColumnarBatchSuite.scala
@@ -32,7 +32,7 @@ import org.apache.spark.memory.MemoryMode
import org.apache.spark.sql.{RandomDataGenerator, Row}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
-import org.apache.spark.sql.catalyst.util.{ArrayBasedMapBuilder, DateTimeUtils, GenericArrayData}
+import org.apache.spark.sql.catalyst.util.{ArrayBasedMapBuilder, DateTimeUtils, GenericArrayData, MapData}
import org.apache.spark.sql.execution.RowToColumnConverter
import org.apache.spark.sql.types._
import org.apache.spark.sql.util.ArrowUtils
@@ -896,6 +896,16 @@ class ColumnarBatchSuite extends SparkFunSuite {
assert(a4.asScala == Map())
assert(a5.asScala == Map(3 -> 6, 4 -> 8, 5 -> 10))
+ def toScalaMap(mapData: MapData): Map[Int, Int] = {
+ val keys = mapData.keyArray().toSeq[Int](IntegerType)
+ val values = mapData.valueArray().toSeq[Int](IntegerType)
+ (keys zip values).toMap
+ }
+ assert(toScalaMap(column.getMap(0).copy()) === Map(0 -> 0))
+ assert(toScalaMap(column.getMap(1).copy()) === Map(1 -> 2, 2 -> 4))
+ assert(toScalaMap(column.getMap(3).copy()) === Map())
+ assert(toScalaMap(column.getMap(4).copy()) === Map(3 -> 6, 4 -> 8, 5 -> 10))
+
column.close()
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org