Posted to commits@hbase.apache.org by me...@apache.org on 2021/08/23 08:28:20 UTC

[hbase-connectors] branch master updated: HBASE-26211 Fix decoding of Long values in NaiveEncoder (#83)

This is an automated email from the ASF dual-hosted git repository.

meszibalu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase-connectors.git


The following commit(s) were added to refs/heads/master by this push:
     new 6b4d985  HBASE-26211 Fix decoding of Long values in NaiveEncoder (#83)
6b4d985 is described below

commit 6b4d98536a61d0d876fef3f4efd7a2e5aec26c90
Author: Hristo Iliev <gi...@hiliev.eu>
AuthorDate: Mon Aug 23 11:28:16 2021 +0300

    HBASE-26211 Fix decoding of Long values in NaiveEncoder (#83)
    
    Signed-off-by: Balazs Meszaros <me...@apache.org>
---
 .../hbase/spark/datasources/NaiveEncoder.scala     |  4 +-
 .../hbase/spark/DynamicLogicExpressionSuite.scala  | 68 ++++++++++++++++++++++
 2 files changed, 70 insertions(+), 2 deletions(-)

diff --git a/spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/NaiveEncoder.scala b/spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/NaiveEncoder.scala
index a2a6828..6732869 100644
--- a/spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/NaiveEncoder.scala
+++ b/spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/NaiveEncoder.scala
@@ -240,8 +240,8 @@ class NaiveEncoder extends BytesEncoder with Logging{
         val value = Bytes.toInt(filterBytes, offset2 + 1)
         compare(in.compareTo(value), ops)
       case LongEnc | TimestampEnc =>
-        val in = Bytes.toInt(input, offset1)
-        val value = Bytes.toInt(filterBytes, offset2 + 1)
+        val in = Bytes.toLong(input, offset1)
+        val value = Bytes.toLong(filterBytes, offset2 + 1)
         compare(in.compareTo(value), ops)
       case FloatEnc =>
         val in = Bytes.toFloat(input, offset1)
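
For context, here is a minimal sketch (not part of the patch) of the failure the one-line change above addresses, assuming only the standard org.apache.hadoop.hbase.util.Bytes utility; the object name LongDecodingSketch is illustrative. A long is serialized as 8 big-endian bytes, so decoding it with Bytes.toInt reads only the 4 high-order bytes; for small values such as 10L and 20L those bytes are all zero and the two values compare as equal.

    import org.apache.hadoop.hbase.util.Bytes

    object LongDecodingSketch {
      def main(args: Array[String]): Unit = {
        val a = Bytes.toBytes(10L) // 00 00 00 00 00 00 00 0a
        val b = Bytes.toBytes(20L) // 00 00 00 00 00 00 00 14

        // Old decoding: only the first 4 bytes are read, so both values become 0.
        println(Bytes.toInt(a, 0).compareTo(Bytes.toInt(b, 0)))   // 0, i.e. "equal"

        // Fixed decoding: all 8 bytes are read.
        println(Bytes.toLong(a, 0).compareTo(Bytes.toLong(b, 0))) // negative, 10L < 20L
      }
    }

The "Long Type" test added below exercises these comparisons through the DynamicLogicExpression machinery.
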
diff --git a/spark/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpressionSuite.scala b/spark/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpressionSuite.scala
index 0424527..e493c54 100644
--- a/spark/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpressionSuite.scala
+++ b/spark/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/DynamicLogicExpressionSuite.scala
@@ -279,6 +279,74 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
     assert(!builtExpression.execute(columnToCurrentRowValueMap, valueFromQueryValueArray))
   }
 
+  test("Long Type") {
+    val greaterLogic = new GreaterThanLogicExpression("Col1", 0)
+    greaterLogic.setEncoder(encoder)
+    val greaterAndEqualLogic = new GreaterThanOrEqualLogicExpression("Col1", 0)
+    greaterAndEqualLogic.setEncoder(encoder)
+    val lessLogic = new LessThanLogicExpression("Col1", 0)
+    lessLogic.setEncoder(encoder)
+    val lessAndEqualLogic = new LessThanOrEqualLogicExpression("Col1", 0)
+    lessAndEqualLogic.setEncoder(encoder)
+    val equalLogic = new EqualLogicExpression("Col1", 0, false)
+    val notEqualLogic = new EqualLogicExpression("Col1", 0, true)
+
+    val columnToCurrentRowValueMap = new util.HashMap[String, ByteArrayComparable]()
+    columnToCurrentRowValueMap.put("Col1", new ByteArrayComparable(Bytes.toBytes(10L)))
+    val valueFromQueryValueArray = new Array[Array[Byte]](1)
+
+    //greater than
+    valueFromQueryValueArray(0) = encoder.encode(LongType, 10L)
+    assert(!greaterLogic.execute(columnToCurrentRowValueMap, valueFromQueryValueArray))
+
+    valueFromQueryValueArray(0) = encoder.encode(LongType, 20L)
+    assert(!greaterLogic.execute(columnToCurrentRowValueMap, valueFromQueryValueArray))
+
+    //greater than or equal
+    valueFromQueryValueArray(0) = encoder.encode(LongType, 5L)
+    assert(greaterAndEqualLogic.execute(columnToCurrentRowValueMap,
+      valueFromQueryValueArray))
+
+    valueFromQueryValueArray(0) = encoder.encode(LongType, 10L)
+    assert(greaterAndEqualLogic.execute(columnToCurrentRowValueMap,
+      valueFromQueryValueArray))
+
+    valueFromQueryValueArray(0) = encoder.encode(LongType, 20L)
+    assert(!greaterAndEqualLogic.execute(columnToCurrentRowValueMap,
+      valueFromQueryValueArray))
+
+    //less than
+    valueFromQueryValueArray(0) = encoder.encode(LongType, 10L)
+    assert(!lessLogic.execute(columnToCurrentRowValueMap, valueFromQueryValueArray))
+
+    valueFromQueryValueArray(0) = encoder.encode(LongType, 5L)
+    assert(!lessLogic.execute(columnToCurrentRowValueMap, valueFromQueryValueArray))
+
+    //less than or equal
+    valueFromQueryValueArray(0) = encoder.encode(LongType, 20L)
+    assert(lessAndEqualLogic.execute(columnToCurrentRowValueMap, valueFromQueryValueArray))
+
+    valueFromQueryValueArray(0) = encoder.encode(LongType, 20L)
+    assert(lessAndEqualLogic.execute(columnToCurrentRowValueMap, valueFromQueryValueArray))
+
+    valueFromQueryValueArray(0) = encoder.encode(LongType, 10L)
+    assert(lessAndEqualLogic.execute(columnToCurrentRowValueMap, valueFromQueryValueArray))
+
+    //equal to
+    valueFromQueryValueArray(0) = Bytes.toBytes(10L)
+    assert(equalLogic.execute(columnToCurrentRowValueMap, valueFromQueryValueArray))
+
+    valueFromQueryValueArray(0) = Bytes.toBytes(5L)
+    assert(!equalLogic.execute(columnToCurrentRowValueMap, valueFromQueryValueArray))
+
+    //not equal to
+    valueFromQueryValueArray(0) = Bytes.toBytes(10L)
+    assert(!notEqualLogic.execute(columnToCurrentRowValueMap, valueFromQueryValueArray))
+
+    valueFromQueryValueArray(0) = Bytes.toBytes(5L)
+    assert(notEqualLogic.execute(columnToCurrentRowValueMap, valueFromQueryValueArray))
+  }
+
   test("String Type") {
     val leftLogic = new LessThanLogicExpression("Col1", 0)
     leftLogic.setEncoder(encoder)