You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@asterixdb.apache.org by al...@apache.org on 2019/03/06 03:34:58 UTC

[asterixdb] branch master updated: [ASTERIXDB-2523][RT][COMP] add support for hashing array fields

This is an automated email from the ASF dual-hosted git repository.

alsuliman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git


The following commit(s) were added to refs/heads/master by this push:
     new 8552311  [ASTERIXDB-2523][RT][COMP] add support for hashing array fields
8552311 is described below

commit 8552311a02724d1a77bfa831c16963ede1cc985b
Author: Ali Alsuliman <al...@gmail.com>
AuthorDate: Mon Mar 4 12:59:53 2019 -0800

    [ASTERIXDB-2523][RT][COMP] add support for hashing array fields
    
    - user model changes: no
    - storage format changes: no
    - interface changes: no
    
    Details:
    Add support for hashing array fields.
    - Modified AMurmurHash3BinaryHashFunctionFamily and extracted the hashing function
    into a private named hashing function "GenericHashFunction". Added hashing arrays.
    - Modified hash join to include generating hash functions for the right branch
    since now hash functions are type-dependent and cannot use the same hash functions
    generated for the left branch.
    - Added test cases.
    
    Change-Id: Ibd0dc7f270730140226f54445705822049f5c863
    Reviewed-on: https://asterix-gerrit.ics.uci.edu/3241
    Integration-Tests: Jenkins <je...@fulliautomatix.ics.uci.edu>
    Reviewed-by: Till Westmann <ti...@apache.org>
    Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>
    Contrib: Jenkins <je...@fulliautomatix.ics.uci.edu>
---
 asterixdb/asterix-app/data/complex/arrays2.adm     |   5 -
 .../queries_sqlpp/distinct/array/array.1.ddl.sqlpp |  21 ++-
 .../distinct/array/array.2.update.sqlpp            |   8 +-
 .../distinct/array/array.3.query.sqlpp             |   9 +-
 .../distinct/array/array.4.query.sqlpp             |   9 +-
 .../queries_sqlpp/distinct/array/array.5.ddl.sqlpp |   7 +-
 .../group-by/gby-array/gby-array.1.ddl.sqlpp}      |  29 ++--
 .../group-by/gby-array/gby-array.2.update.sqlpp    |   8 +-
 .../group-by/gby-array/gby-array.3.query.sqlpp     |  10 +-
 .../group-by/gby-array/gby-array.4.query.sqlpp     |  10 +-
 .../group-by/gby-array/gby-array.5.ddl.sqlpp       |   7 +-
 .../hash_join_array/hash_join_array.1.ddl.sqlpp    |  24 ++-
 .../hash_join_array/hash_join_array.10.query.sqlpp |   9 +-
 .../hash_join_array/hash_join_array.11.ddl.sqlpp   |   7 +-
 .../hash_join_array/hash_join_array.2.update.sqlpp |  12 +-
 .../hash_join_array/hash_join_array.3.query.sqlpp  |   9 +-
 .../hash_join_array/hash_join_array.4.query.sqlpp  |   9 +-
 .../hash_join_array/hash_join_array.5.query.sqlpp  |   9 +-
 .../hash_join_array/hash_join_array.6.query.sqlpp  |   9 +-
 .../hash_join_array/hash_join_array.7.query.sqlpp  |   9 +-
 .../hash_join_array/hash_join_array.8.query.sqlpp  |   9 +-
 .../hash_join_array/hash_join_array.9.query.sqlpp  |   9 +-
 .../runtimets/results/distinct/array/array.3.adm   |  14 ++
 .../runtimets/results/distinct/array/array.4.adm   |  14 ++
 .../results/group-by/gby-array/gby-array.3.adm     |  14 ++
 .../results/group-by/gby-array/gby-array.4.adm     |  14 ++
 .../join/hash_join_array/hash_join_array.10.adm    |  25 +++
 .../join/hash_join_array/hash_join_array.3.adm     |  18 +++
 .../join/hash_join_array/hash_join_array.4.adm     |  18 +++
 .../join/hash_join_array/hash_join_array.5.adm     |  18 +++
 .../join/hash_join_array/hash_join_array.6.adm     |  18 +++
 .../join/hash_join_array/hash_join_array.7.adm     |  25 +++
 .../join/hash_join_array/hash_join_array.8.adm     |  25 +++
 .../join/hash_join_array/hash_join_array.9.adm     |  25 +++
 .../test/resources/runtimets/testsuite_sqlpp.xml   |  17 +++
 .../AbstractAGenericBinaryComparator.java          |  31 ++--
 .../hash/AMurmurHash3BinaryHashFunctionFamily.java | 169 +++++++++++++--------
 .../BinaryHashFunctionFactoryProvider.java         |   5 +-
 .../BinaryHashFunctionFamilyProvider.java          |  12 +-
 .../om/typecomputer/impl/TypeComputeUtils.java     |   6 +
 .../asterix/om/util/container/IObjectFactory.java  |  11 +-
 .../{IObjectFactory.java => ObjectFactories.java}  |  26 ++--
 .../physical/HybridHashJoinPOperator.java          |  24 +--
 .../physical/InMemoryHashJoinPOperator.java        |  15 +-
 .../data/IBinaryHashFunctionFactoryProvider.java   |  17 ++-
 .../data/IBinaryHashFunctionFamilyProvider.java    |  17 ++-
 .../api/dataflow/value/IBinaryHashFunction.java    |   6 +
 .../dataflow/value/IBinaryHashFunctionFactory.java |  14 +-
 .../dataflow/value/IBinaryHashFunctionFamily.java  |  16 +-
 .../apache/hyracks/api/exceptions/ErrorCode.java   |   1 +
 .../src/main/resources/errormsg/en.properties      |   1 +
 .../join/InMemoryHashJoinOperatorDescriptor.java   |  26 ++--
 .../OptimizedHybridHashJoinOperatorDescriptor.java |  27 ++--
 .../integration/TPCHCustomerOrderHashJoinTest.java |  11 ++
 .../apache/hyracks/examples/tpch/client/Join.java  |   3 +
 55 files changed, 640 insertions(+), 281 deletions(-)

diff --git a/asterixdb/asterix-app/data/complex/arrays2.adm b/asterixdb/asterix-app/data/complex/arrays2.adm
index fe56a69..ce2ae50 100644
--- a/asterixdb/asterix-app/data/complex/arrays2.adm
+++ b/asterixdb/asterix-app/data/complex/arrays2.adm
@@ -1,20 +1,15 @@
 {"id":1, "name":"Margarita", "dept_ids": [4]}
 {"id":2, "name":"Isac", "dept_ids": [99, 12, 14, 15]}
-{"id":3, "name":"Emory", "dept_ids": [33, 3, 16]}
-{"id":4, "name":"Nicholas", "dept_ids": [1, 5, 6]}
 {"id":5, "name":"Von", "dept_ids": [3, 2, 8]}
 {"id":6, "name":"Willis", "dept_ids": []}
-{"id":7, "name":"Suzanna", "dept_ids": [4]}
 {"id":8, "name":"Nicole", "dept_ids": [33, 11, 3, 16]}
 {"id":9, "name":"Woodrow", "dept_ids": null}
-{"id":10, "name":"Bram", "dept_ids": []}
 {"id":11, "name":"Nicholas", "dept_ids":[3, 2, 8] }
 {"id":12, "name":"John", "dept_ids": [9, 5, 2]}
 {"id":13, "name":"Steve"}
 {"id":14, "name":"Jay", "dept_ids": [9, 5, 2]}
 {"id":15, "name":"Jim", "dept_ids": null}
 {"id":16, "name":"Wail", "dept_ids": [2]}
-{"id":17, "name":"Jim", "dept_ids": [8, 2]}
 {"id":18, "name":"Kayle"}
 {"id":19, "name":"Mart", "dept_ids": [8, 2, 1, 7, 9]}
 {"id":20, "name":"Mai", "dept_ids": [1, 5, 9]}
diff --git a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/distinct/array/array.1.ddl.sqlpp
similarity index 69%
copy from hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/distinct/array/array.1.ddl.sqlpp
index 93dd3d5..d4a5d7e 100644
--- a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/distinct/array/array.1.ddl.sqlpp
@@ -17,12 +17,21 @@
  * under the License.
  */
 
-package org.apache.hyracks.algebricks.data;
+// testing comparing and hashing fields whose values are arrays
+drop  dataverse test if exists;
+create  dataverse test;
+use test;
 
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+create type closedType as closed {
+id: int,
+name: string,
+dept_ids: [int]?
+};
 
-public interface IBinaryHashFunctionFamilyProvider {
+create type openType as {
+id: int,
+name: string
+};
 
-    public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type) throws AlgebricksException;
-}
+create dataset closedDs(closedType) primary key id;
+create dataset openDs(openType) primary key id;
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/distinct/array/array.2.update.sqlpp
similarity index 78%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/distinct/array/array.2.update.sqlpp
index e512416..98b5875 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/distinct/array/array.2.update.sqlpp
@@ -16,10 +16,8 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
+use test;
 
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+load dataset closedDs using localfs (("path"="asterix_nc1://data/complex/arrays1.adm"),("format"="adm"));
+load dataset openDs using localfs (("path"="asterix_nc1://data/complex/arrays1.adm"),("format"="adm"));
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/distinct/array/array.3.query.sqlpp
similarity index 79%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/distinct/array/array.3.query.sqlpp
index e512416..28207b9 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/distinct/array/array.3.query.sqlpp
@@ -16,10 +16,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
+use test;
 
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+from openDs v
+select distinct v.dept_ids
+order by v.dept_ids;
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/distinct/array/array.4.query.sqlpp
similarity index 79%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/distinct/array/array.4.query.sqlpp
index e512416..297286e 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/distinct/array/array.4.query.sqlpp
@@ -16,10 +16,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
+use test;
 
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+from closedDs v
+select distinct v.dept_ids
+order by v.dept_ids;
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/distinct/array/array.5.ddl.sqlpp
old mode 100644
new mode 100755
similarity index 79%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/distinct/array/array.5.ddl.sqlpp
index e512416..269f673
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/distinct/array/array.5.ddl.sqlpp
@@ -16,10 +16,5 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
-
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+drop  dataverse test;
\ No newline at end of file
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/container/IObjectFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/gby-array/gby-array.1.ddl.sqlpp
similarity index 69%
copy from asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/container/IObjectFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/gby-array/gby-array.1.ddl.sqlpp
index 48dcbfb..2e1ce7b 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/container/IObjectFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/gby-array/gby-array.1.ddl.sqlpp
@@ -17,18 +17,21 @@
  * under the License.
  */
 
-package org.apache.asterix.om.util.container;
+// testing grouping by fields whose values are arrays
+drop  dataverse test if exists;
+create  dataverse test;
+use test;
 
-/**
- * A factory interface to create objects.
- */
-public interface IObjectFactory<E, T> {
+create type closedType as closed {
+id: int,
+name: string,
+dept_ids: [int]?
+};
+
+create type openType as {
+id: int,
+name: string
+};
 
-    /**
-     * create an element of type E
-     *
-     * @param arg
-     * @return an E element
-     */
-    public E create(T arg);
-}
+create dataset closedDs(closedType) primary key id;
+create dataset openDs(openType) primary key id;
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/gby-array/gby-array.2.update.sqlpp
similarity index 78%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/gby-array/gby-array.2.update.sqlpp
index e512416..98b5875 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/gby-array/gby-array.2.update.sqlpp
@@ -16,10 +16,8 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
+use test;
 
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+load dataset closedDs using localfs (("path"="asterix_nc1://data/complex/arrays1.adm"),("format"="adm"));
+load dataset openDs using localfs (("path"="asterix_nc1://data/complex/arrays1.adm"),("format"="adm"));
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/gby-array/gby-array.3.query.sqlpp
similarity index 79%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/gby-array/gby-array.3.query.sqlpp
index e512416..c8616bc 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/gby-array/gby-array.3.query.sqlpp
@@ -16,10 +16,10 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
+use test;
 
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+from openDs v
+group by v.dept_ids
+select count(*) as count, v.dept_ids
+order by v.dept_ids;
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/gby-array/gby-array.4.query.sqlpp
similarity index 79%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/gby-array/gby-array.4.query.sqlpp
index e512416..1575869 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/gby-array/gby-array.4.query.sqlpp
@@ -16,10 +16,10 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
+use test;
 
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+from closedDs v
+group by v.dept_ids
+select count(*) as count, v.dept_ids
+order by v.dept_ids;
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/gby-array/gby-array.5.ddl.sqlpp
old mode 100644
new mode 100755
similarity index 79%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/gby-array/gby-array.5.ddl.sqlpp
index e512416..269f673
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/gby-array/gby-array.5.ddl.sqlpp
@@ -16,10 +16,5 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
-
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+drop  dataverse test;
\ No newline at end of file
diff --git a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.1.ddl.sqlpp
similarity index 64%
copy from hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.1.ddl.sqlpp
index 93dd3d5..617ab1c 100644
--- a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.1.ddl.sqlpp
@@ -17,12 +17,24 @@
  * under the License.
  */
 
-package org.apache.hyracks.algebricks.data;
+// testing joining on fields whose values are arrays
+drop  dataverse test if exists;
+create  dataverse test;
+use test;
 
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+create type closedType as closed {
+id: int,
+name: string,
+dept_ids: [int]?
+};
 
-public interface IBinaryHashFunctionFamilyProvider {
+create type openType as {
+id: int,
+name: string
+};
 
-    public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type) throws AlgebricksException;
-}
+create dataset closedDs1(closedType) primary key id;
+create dataset openDs1(openType) primary key id;
+
+create dataset closedDs2(closedType) primary key id;
+create dataset openDs2(openType) primary key id;
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.10.query.sqlpp
similarity index 79%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.10.query.sqlpp
index e512416..898e488 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.10.query.sqlpp
@@ -16,10 +16,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
+use test;
 
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+from closedDs1 v1 LEFT OUTER JOIN closedDs2 v2 ON v1.dept_ids = v2.dept_ids
+select *
+order by v1.id, v2.id;
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.11.ddl.sqlpp
old mode 100644
new mode 100755
similarity index 79%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.11.ddl.sqlpp
index e512416..269f673
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.11.ddl.sqlpp
@@ -16,10 +16,5 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
-
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+drop  dataverse test;
\ No newline at end of file
diff --git a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.2.update.sqlpp
similarity index 65%
copy from hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.2.update.sqlpp
index 93dd3d5..b87bf16 100644
--- a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.2.update.sqlpp
@@ -17,12 +17,10 @@
  * under the License.
  */
 
-package org.apache.hyracks.algebricks.data;
+use test;
 
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+load dataset closedDs1 using localfs (("path"="asterix_nc1://data/complex/arrays1.adm"),("format"="adm"));
+load dataset openDs1 using localfs (("path"="asterix_nc1://data/complex/arrays1.adm"),("format"="adm"));
 
-public interface IBinaryHashFunctionFamilyProvider {
-
-    public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type) throws AlgebricksException;
-}
+load dataset closedDs2 using localfs (("path"="asterix_nc1://data/complex/arrays2.adm"),("format"="adm"));
+load dataset openDs2 using localfs (("path"="asterix_nc1://data/complex/arrays2.adm"),("format"="adm"));
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.3.query.sqlpp
similarity index 79%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.3.query.sqlpp
index e512416..8fa562a 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.3.query.sqlpp
@@ -16,10 +16,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
+use test;
 
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+from openDs1 v1 JOIN openDs2 v2 ON v1.dept_ids = v2.dept_ids
+select *
+order by v1.id, v2.id;
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.4.query.sqlpp
similarity index 79%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.4.query.sqlpp
index e512416..8f420a7 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.4.query.sqlpp
@@ -16,10 +16,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
+use test;
 
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+from openDs1 v1 JOIN closedDs2 v2 ON v1.dept_ids = v2.dept_ids
+select *
+order by v1.id, v2.id;
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.5.query.sqlpp
similarity index 79%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.5.query.sqlpp
index e512416..d985379 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.5.query.sqlpp
@@ -16,10 +16,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
+use test;
 
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+from closedDs1 v1 JOIN openDs2 v2 ON v1.dept_ids = v2.dept_ids
+select *
+order by v1.id, v2.id;
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.6.query.sqlpp
similarity index 79%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.6.query.sqlpp
index e512416..5406c11 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.6.query.sqlpp
@@ -16,10 +16,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
+use test;
 
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+from closedDs1 v1 JOIN closedDs2 v2 ON v1.dept_ids = v2.dept_ids
+select *
+order by v1.id, v2.id;
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.7.query.sqlpp
similarity index 79%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.7.query.sqlpp
index e512416..017213c 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.7.query.sqlpp
@@ -16,10 +16,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
+use test;
 
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+from openDs1 v1 LEFT OUTER JOIN openDs2 v2 ON v1.dept_ids = v2.dept_ids
+select *
+order by v1.id, v2.id;
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.8.query.sqlpp
similarity index 79%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.8.query.sqlpp
index e512416..1463ea6 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.8.query.sqlpp
@@ -16,10 +16,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
+use test;
 
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+from openDs1 v1 LEFT OUTER JOIN closedDs2 v2 ON v1.dept_ids = v2.dept_ids
+select *
+order by v1.id, v2.id;
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.9.query.sqlpp
similarity index 79%
copy from hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.9.query.sqlpp
index e512416..526bc06 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/join/hash_join_array/hash_join_array.9.query.sqlpp
@@ -16,10 +16,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.api.dataflow.value;
 
-import java.io.Serializable;
+use test;
 
-public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
-}
+from closedDs1 v1 LEFT OUTER JOIN openDs2 v2 ON v1.dept_ids = v2.dept_ids
+select *
+order by v1.id, v2.id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/distinct/array/array.3.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/distinct/array/array.3.adm
new file mode 100644
index 0000000..89ceb43
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/distinct/array/array.3.adm
@@ -0,0 +1,14 @@
+{  }
+{ "dept_ids": null }
+{ "dept_ids": [  ] }
+{ "dept_ids": [ 1, 5, 6 ] }
+{ "dept_ids": [ 1, 5, 9 ] }
+{ "dept_ids": [ 2 ] }
+{ "dept_ids": [ 3, 2, 8 ] }
+{ "dept_ids": [ 3, 5, 8 ] }
+{ "dept_ids": [ 4 ] }
+{ "dept_ids": [ 8, 2 ] }
+{ "dept_ids": [ 8, 2, 1, 7, 9 ] }
+{ "dept_ids": [ 9, 5, 2 ] }
+{ "dept_ids": [ 33, 11, 3, 16 ] }
+{ "dept_ids": [ 99, 12, 14, 15, 77 ] }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/distinct/array/array.4.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/distinct/array/array.4.adm
new file mode 100644
index 0000000..89ceb43
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/distinct/array/array.4.adm
@@ -0,0 +1,14 @@
+{  }
+{ "dept_ids": null }
+{ "dept_ids": [  ] }
+{ "dept_ids": [ 1, 5, 6 ] }
+{ "dept_ids": [ 1, 5, 9 ] }
+{ "dept_ids": [ 2 ] }
+{ "dept_ids": [ 3, 2, 8 ] }
+{ "dept_ids": [ 3, 5, 8 ] }
+{ "dept_ids": [ 4 ] }
+{ "dept_ids": [ 8, 2 ] }
+{ "dept_ids": [ 8, 2, 1, 7, 9 ] }
+{ "dept_ids": [ 9, 5, 2 ] }
+{ "dept_ids": [ 33, 11, 3, 16 ] }
+{ "dept_ids": [ 99, 12, 14, 15, 77 ] }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/group-by/gby-array/gby-array.3.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/group-by/gby-array/gby-array.3.adm
new file mode 100644
index 0000000..fe8e7c7
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/group-by/gby-array/gby-array.3.adm
@@ -0,0 +1,14 @@
+{ "count": 2 }
+{ "count": 2, "dept_ids": null }
+{ "count": 2, "dept_ids": [  ] }
+{ "count": 1, "dept_ids": [ 1, 5, 6 ] }
+{ "count": 1, "dept_ids": [ 1, 5, 9 ] }
+{ "count": 1, "dept_ids": [ 2 ] }
+{ "count": 2, "dept_ids": [ 3, 2, 8 ] }
+{ "count": 1, "dept_ids": [ 3, 5, 8 ] }
+{ "count": 2, "dept_ids": [ 4 ] }
+{ "count": 1, "dept_ids": [ 8, 2 ] }
+{ "count": 1, "dept_ids": [ 8, 2, 1, 7, 9 ] }
+{ "count": 2, "dept_ids": [ 9, 5, 2 ] }
+{ "count": 2, "dept_ids": [ 33, 11, 3, 16 ] }
+{ "count": 1, "dept_ids": [ 99, 12, 14, 15, 77 ] }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/group-by/gby-array/gby-array.4.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/group-by/gby-array/gby-array.4.adm
new file mode 100644
index 0000000..fe8e7c7
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/group-by/gby-array/gby-array.4.adm
@@ -0,0 +1,14 @@
+{ "count": 2 }
+{ "count": 2, "dept_ids": null }
+{ "count": 2, "dept_ids": [  ] }
+{ "count": 1, "dept_ids": [ 1, 5, 6 ] }
+{ "count": 1, "dept_ids": [ 1, 5, 9 ] }
+{ "count": 1, "dept_ids": [ 2 ] }
+{ "count": 2, "dept_ids": [ 3, 2, 8 ] }
+{ "count": 1, "dept_ids": [ 3, 5, 8 ] }
+{ "count": 2, "dept_ids": [ 4 ] }
+{ "count": 1, "dept_ids": [ 8, 2 ] }
+{ "count": 1, "dept_ids": [ 8, 2, 1, 7, 9 ] }
+{ "count": 2, "dept_ids": [ 9, 5, 2 ] }
+{ "count": 2, "dept_ids": [ 33, 11, 3, 16 ] }
+{ "count": 1, "dept_ids": [ 99, 12, 14, 15, 77 ] }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.10.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.10.adm
new file mode 100644
index 0000000..8aabb36
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.10.adm
@@ -0,0 +1,25 @@
+{ "v1": { "id": 1, "name": "Margarita", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 5, "name": "Von", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 1, "name": "Margarita", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 11, "name": "Nicholas", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 2, "name": "Isac", "dept_ids": [ 99, 12, 14, 15, 77 ] } }
+{ "v1": { "id": 3, "name": "Emory", "dept_ids": [ 33, 11, 3, 16 ] }, "v2": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] } }
+{ "v1": { "id": 4, "name": "Nicholas", "dept_ids": [  ] }, "v2": { "id": 6, "name": "Willis", "dept_ids": [  ] } }
+{ "v1": { "id": 5, "name": "Von", "dept_ids": [ 4 ] }, "v2": { "id": 1, "name": "Margarita", "dept_ids": [ 4 ] } }
+{ "v1": { "id": 6, "name": "Willis", "dept_ids": [ 1, 5, 6 ] } }
+{ "v1": { "id": 7, "name": "Suzanna", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 12, "name": "John", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 7, "name": "Suzanna", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 14, "name": "Jay", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] }, "v2": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] } }
+{ "v1": { "id": 9, "name": "Woodrow", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 5, "name": "Von", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 9, "name": "Woodrow", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 11, "name": "Nicholas", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 10, "name": "Bram", "dept_ids": [  ] }, "v2": { "id": 6, "name": "Willis", "dept_ids": [  ] } }
+{ "v1": { "id": 11, "name": "Nicholas", "dept_ids": null } }
+{ "v1": { "id": 12, "name": "John", "dept_ids": [ 2 ] }, "v2": { "id": 16, "name": "Wail", "dept_ids": [ 2 ] } }
+{ "v1": { "id": 13, "name": "Steve", "dept_ids": [ 8, 2 ] } }
+{ "v1": { "id": 14, "name": "Jay", "dept_ids": [ 4 ] }, "v2": { "id": 1, "name": "Margarita", "dept_ids": [ 4 ] } }
+{ "v1": { "id": 15, "name": "Jim", "dept_ids": null } }
+{ "v1": { "id": 16, "name": "Wail", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 12, "name": "John", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 16, "name": "Wail", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 14, "name": "Jay", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 17, "name": "Jim" } }
+{ "v1": { "id": 18, "name": "Kayle", "dept_ids": [ 8, 2, 1, 7, 9 ] }, "v2": { "id": 19, "name": "Mart", "dept_ids": [ 8, 2, 1, 7, 9 ] } }
+{ "v1": { "id": 19, "name": "Mart" } }
+{ "v1": { "id": 20, "name": "Mai", "dept_ids": [ 1, 5, 9 ] }, "v2": { "id": 20, "name": "Mai", "dept_ids": [ 1, 5, 9 ] } }
+{ "v1": { "id": 21, "name": "Ken", "dept_ids": [ 3, 5, 8 ] }, "v2": { "id": 21, "name": "Ken", "dept_ids": [ 3, 5, 8 ] } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.3.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.3.adm
new file mode 100644
index 0000000..fa08005
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.3.adm
@@ -0,0 +1,18 @@
+{ "v1": { "id": 1, "name": "Margarita", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 5, "name": "Von", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 1, "name": "Margarita", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 11, "name": "Nicholas", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 3, "name": "Emory", "dept_ids": [ 33, 11, 3, 16 ] }, "v2": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] } }
+{ "v1": { "id": 4, "name": "Nicholas", "dept_ids": [  ] }, "v2": { "id": 6, "name": "Willis", "dept_ids": [  ] } }
+{ "v1": { "id": 5, "name": "Von", "dept_ids": [ 4 ] }, "v2": { "id": 1, "name": "Margarita", "dept_ids": [ 4 ] } }
+{ "v1": { "id": 7, "name": "Suzanna", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 12, "name": "John", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 7, "name": "Suzanna", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 14, "name": "Jay", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] }, "v2": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] } }
+{ "v1": { "id": 9, "name": "Woodrow", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 5, "name": "Von", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 9, "name": "Woodrow", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 11, "name": "Nicholas", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 10, "name": "Bram", "dept_ids": [  ] }, "v2": { "id": 6, "name": "Willis", "dept_ids": [  ] } }
+{ "v1": { "id": 12, "name": "John", "dept_ids": [ 2 ] }, "v2": { "id": 16, "name": "Wail", "dept_ids": [ 2 ] } }
+{ "v1": { "id": 14, "name": "Jay", "dept_ids": [ 4 ] }, "v2": { "id": 1, "name": "Margarita", "dept_ids": [ 4 ] } }
+{ "v1": { "id": 16, "name": "Wail", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 12, "name": "John", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 16, "name": "Wail", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 14, "name": "Jay", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 18, "name": "Kayle", "dept_ids": [ 8, 2, 1, 7, 9 ] }, "v2": { "id": 19, "name": "Mart", "dept_ids": [ 8, 2, 1, 7, 9 ] } }
+{ "v1": { "id": 20, "name": "Mai", "dept_ids": [ 1, 5, 9 ] }, "v2": { "id": 20, "name": "Mai", "dept_ids": [ 1, 5, 9 ] } }
+{ "v1": { "id": 21, "name": "Ken", "dept_ids": [ 3, 5, 8 ] }, "v2": { "id": 21, "name": "Ken", "dept_ids": [ 3, 5, 8 ] } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.4.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.4.adm
new file mode 100644
index 0000000..fa08005
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.4.adm
@@ -0,0 +1,18 @@
+{ "v1": { "id": 1, "name": "Margarita", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 5, "name": "Von", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 1, "name": "Margarita", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 11, "name": "Nicholas", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 3, "name": "Emory", "dept_ids": [ 33, 11, 3, 16 ] }, "v2": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] } }
+{ "v1": { "id": 4, "name": "Nicholas", "dept_ids": [  ] }, "v2": { "id": 6, "name": "Willis", "dept_ids": [  ] } }
+{ "v1": { "id": 5, "name": "Von", "dept_ids": [ 4 ] }, "v2": { "id": 1, "name": "Margarita", "dept_ids": [ 4 ] } }
+{ "v1": { "id": 7, "name": "Suzanna", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 12, "name": "John", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 7, "name": "Suzanna", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 14, "name": "Jay", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] }, "v2": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] } }
+{ "v1": { "id": 9, "name": "Woodrow", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 5, "name": "Von", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 9, "name": "Woodrow", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 11, "name": "Nicholas", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 10, "name": "Bram", "dept_ids": [  ] }, "v2": { "id": 6, "name": "Willis", "dept_ids": [  ] } }
+{ "v1": { "id": 12, "name": "John", "dept_ids": [ 2 ] }, "v2": { "id": 16, "name": "Wail", "dept_ids": [ 2 ] } }
+{ "v1": { "id": 14, "name": "Jay", "dept_ids": [ 4 ] }, "v2": { "id": 1, "name": "Margarita", "dept_ids": [ 4 ] } }
+{ "v1": { "id": 16, "name": "Wail", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 12, "name": "John", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 16, "name": "Wail", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 14, "name": "Jay", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 18, "name": "Kayle", "dept_ids": [ 8, 2, 1, 7, 9 ] }, "v2": { "id": 19, "name": "Mart", "dept_ids": [ 8, 2, 1, 7, 9 ] } }
+{ "v1": { "id": 20, "name": "Mai", "dept_ids": [ 1, 5, 9 ] }, "v2": { "id": 20, "name": "Mai", "dept_ids": [ 1, 5, 9 ] } }
+{ "v1": { "id": 21, "name": "Ken", "dept_ids": [ 3, 5, 8 ] }, "v2": { "id": 21, "name": "Ken", "dept_ids": [ 3, 5, 8 ] } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.5.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.5.adm
new file mode 100644
index 0000000..c1f1595
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.5.adm
@@ -0,0 +1,18 @@
+{ "v1": { "id": 1, "name": "Margarita", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 5, "name": "Von", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 1, "name": "Margarita", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 11, "name": "Nicholas", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 3, "name": "Emory", "dept_ids": [ 33, 11, 3, 16 ] }, "v2": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] } }
+{ "v1": { "id": 4, "name": "Nicholas", "dept_ids": [  ] }, "v2": { "id": 6, "name": "Willis", "dept_ids": [  ] } }
+{ "v1": { "id": 5, "name": "Von", "dept_ids": [ 4 ] }, "v2": { "id": 1, "name": "Margarita", "dept_ids": [ 4 ] } }
+{ "v1": { "id": 7, "name": "Suzanna", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 12, "name": "John", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 7, "name": "Suzanna", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 14, "name": "Jay", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] }, "v2": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] } }
+{ "v1": { "id": 9, "name": "Woodrow", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 5, "name": "Von", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 9, "name": "Woodrow", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 11, "name": "Nicholas", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 10, "name": "Bram", "dept_ids": [  ] }, "v2": { "id": 6, "name": "Willis", "dept_ids": [  ] } }
+{ "v1": { "id": 12, "name": "John", "dept_ids": [ 2 ] }, "v2": { "id": 16, "name": "Wail", "dept_ids": [ 2 ] } }
+{ "v1": { "id": 14, "name": "Jay", "dept_ids": [ 4 ] }, "v2": { "id": 1, "name": "Margarita", "dept_ids": [ 4 ] } }
+{ "v1": { "id": 16, "name": "Wail", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 12, "name": "John", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 16, "name": "Wail", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 14, "name": "Jay", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 18, "name": "Kayle", "dept_ids": [ 8, 2, 1, 7, 9 ] }, "v2": { "id": 19, "name": "Mart", "dept_ids": [ 8, 2, 1, 7, 9 ] } }
+{ "v1": { "id": 20, "name": "Mai", "dept_ids": [ 1, 5, 9 ] }, "v2": { "id": 20, "name": "Mai", "dept_ids": [ 1, 5, 9 ] } }
+{ "v1": { "id": 21, "name": "Ken", "dept_ids": [ 3, 5, 8 ] }, "v2": { "id": 21, "name": "Ken", "dept_ids": [ 3, 5, 8 ] } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.6.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.6.adm
new file mode 100644
index 0000000..c1f1595
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.6.adm
@@ -0,0 +1,18 @@
+{ "v1": { "id": 1, "name": "Margarita", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 5, "name": "Von", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 1, "name": "Margarita", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 11, "name": "Nicholas", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 3, "name": "Emory", "dept_ids": [ 33, 11, 3, 16 ] }, "v2": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] } }
+{ "v1": { "id": 4, "name": "Nicholas", "dept_ids": [  ] }, "v2": { "id": 6, "name": "Willis", "dept_ids": [  ] } }
+{ "v1": { "id": 5, "name": "Von", "dept_ids": [ 4 ] }, "v2": { "id": 1, "name": "Margarita", "dept_ids": [ 4 ] } }
+{ "v1": { "id": 7, "name": "Suzanna", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 12, "name": "John", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 7, "name": "Suzanna", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 14, "name": "Jay", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] }, "v2": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] } }
+{ "v1": { "id": 9, "name": "Woodrow", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 5, "name": "Von", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 9, "name": "Woodrow", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 11, "name": "Nicholas", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 10, "name": "Bram", "dept_ids": [  ] }, "v2": { "id": 6, "name": "Willis", "dept_ids": [  ] } }
+{ "v1": { "id": 12, "name": "John", "dept_ids": [ 2 ] }, "v2": { "id": 16, "name": "Wail", "dept_ids": [ 2 ] } }
+{ "v1": { "id": 14, "name": "Jay", "dept_ids": [ 4 ] }, "v2": { "id": 1, "name": "Margarita", "dept_ids": [ 4 ] } }
+{ "v1": { "id": 16, "name": "Wail", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 12, "name": "John", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 16, "name": "Wail", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 14, "name": "Jay", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 18, "name": "Kayle", "dept_ids": [ 8, 2, 1, 7, 9 ] }, "v2": { "id": 19, "name": "Mart", "dept_ids": [ 8, 2, 1, 7, 9 ] } }
+{ "v1": { "id": 20, "name": "Mai", "dept_ids": [ 1, 5, 9 ] }, "v2": { "id": 20, "name": "Mai", "dept_ids": [ 1, 5, 9 ] } }
+{ "v1": { "id": 21, "name": "Ken", "dept_ids": [ 3, 5, 8 ] }, "v2": { "id": 21, "name": "Ken", "dept_ids": [ 3, 5, 8 ] } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.7.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.7.adm
new file mode 100644
index 0000000..8aabb36
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.7.adm
@@ -0,0 +1,25 @@
+{ "v1": { "id": 1, "name": "Margarita", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 5, "name": "Von", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 1, "name": "Margarita", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 11, "name": "Nicholas", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 2, "name": "Isac", "dept_ids": [ 99, 12, 14, 15, 77 ] } }
+{ "v1": { "id": 3, "name": "Emory", "dept_ids": [ 33, 11, 3, 16 ] }, "v2": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] } }
+{ "v1": { "id": 4, "name": "Nicholas", "dept_ids": [  ] }, "v2": { "id": 6, "name": "Willis", "dept_ids": [  ] } }
+{ "v1": { "id": 5, "name": "Von", "dept_ids": [ 4 ] }, "v2": { "id": 1, "name": "Margarita", "dept_ids": [ 4 ] } }
+{ "v1": { "id": 6, "name": "Willis", "dept_ids": [ 1, 5, 6 ] } }
+{ "v1": { "id": 7, "name": "Suzanna", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 12, "name": "John", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 7, "name": "Suzanna", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 14, "name": "Jay", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] }, "v2": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] } }
+{ "v1": { "id": 9, "name": "Woodrow", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 5, "name": "Von", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 9, "name": "Woodrow", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 11, "name": "Nicholas", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 10, "name": "Bram", "dept_ids": [  ] }, "v2": { "id": 6, "name": "Willis", "dept_ids": [  ] } }
+{ "v1": { "id": 11, "name": "Nicholas", "dept_ids": null } }
+{ "v1": { "id": 12, "name": "John", "dept_ids": [ 2 ] }, "v2": { "id": 16, "name": "Wail", "dept_ids": [ 2 ] } }
+{ "v1": { "id": 13, "name": "Steve", "dept_ids": [ 8, 2 ] } }
+{ "v1": { "id": 14, "name": "Jay", "dept_ids": [ 4 ] }, "v2": { "id": 1, "name": "Margarita", "dept_ids": [ 4 ] } }
+{ "v1": { "id": 15, "name": "Jim", "dept_ids": null } }
+{ "v1": { "id": 16, "name": "Wail", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 12, "name": "John", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 16, "name": "Wail", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 14, "name": "Jay", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 17, "name": "Jim" } }
+{ "v1": { "id": 18, "name": "Kayle", "dept_ids": [ 8, 2, 1, 7, 9 ] }, "v2": { "id": 19, "name": "Mart", "dept_ids": [ 8, 2, 1, 7, 9 ] } }
+{ "v1": { "id": 19, "name": "Mart" } }
+{ "v1": { "id": 20, "name": "Mai", "dept_ids": [ 1, 5, 9 ] }, "v2": { "id": 20, "name": "Mai", "dept_ids": [ 1, 5, 9 ] } }
+{ "v1": { "id": 21, "name": "Ken", "dept_ids": [ 3, 5, 8 ] }, "v2": { "id": 21, "name": "Ken", "dept_ids": [ 3, 5, 8 ] } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.8.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.8.adm
new file mode 100644
index 0000000..8aabb36
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.8.adm
@@ -0,0 +1,25 @@
+{ "v1": { "id": 1, "name": "Margarita", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 5, "name": "Von", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 1, "name": "Margarita", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 11, "name": "Nicholas", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 2, "name": "Isac", "dept_ids": [ 99, 12, 14, 15, 77 ] } }
+{ "v1": { "id": 3, "name": "Emory", "dept_ids": [ 33, 11, 3, 16 ] }, "v2": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] } }
+{ "v1": { "id": 4, "name": "Nicholas", "dept_ids": [  ] }, "v2": { "id": 6, "name": "Willis", "dept_ids": [  ] } }
+{ "v1": { "id": 5, "name": "Von", "dept_ids": [ 4 ] }, "v2": { "id": 1, "name": "Margarita", "dept_ids": [ 4 ] } }
+{ "v1": { "id": 6, "name": "Willis", "dept_ids": [ 1, 5, 6 ] } }
+{ "v1": { "id": 7, "name": "Suzanna", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 12, "name": "John", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 7, "name": "Suzanna", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 14, "name": "Jay", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] }, "v2": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] } }
+{ "v1": { "id": 9, "name": "Woodrow", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 5, "name": "Von", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 9, "name": "Woodrow", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 11, "name": "Nicholas", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 10, "name": "Bram", "dept_ids": [  ] }, "v2": { "id": 6, "name": "Willis", "dept_ids": [  ] } }
+{ "v1": { "id": 11, "name": "Nicholas", "dept_ids": null } }
+{ "v1": { "id": 12, "name": "John", "dept_ids": [ 2 ] }, "v2": { "id": 16, "name": "Wail", "dept_ids": [ 2 ] } }
+{ "v1": { "id": 13, "name": "Steve", "dept_ids": [ 8, 2 ] } }
+{ "v1": { "id": 14, "name": "Jay", "dept_ids": [ 4 ] }, "v2": { "id": 1, "name": "Margarita", "dept_ids": [ 4 ] } }
+{ "v1": { "id": 15, "name": "Jim", "dept_ids": null } }
+{ "v1": { "id": 16, "name": "Wail", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 12, "name": "John", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 16, "name": "Wail", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 14, "name": "Jay", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 17, "name": "Jim" } }
+{ "v1": { "id": 18, "name": "Kayle", "dept_ids": [ 8, 2, 1, 7, 9 ] }, "v2": { "id": 19, "name": "Mart", "dept_ids": [ 8, 2, 1, 7, 9 ] } }
+{ "v1": { "id": 19, "name": "Mart" } }
+{ "v1": { "id": 20, "name": "Mai", "dept_ids": [ 1, 5, 9 ] }, "v2": { "id": 20, "name": "Mai", "dept_ids": [ 1, 5, 9 ] } }
+{ "v1": { "id": 21, "name": "Ken", "dept_ids": [ 3, 5, 8 ] }, "v2": { "id": 21, "name": "Ken", "dept_ids": [ 3, 5, 8 ] } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.9.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.9.adm
new file mode 100644
index 0000000..8aabb36
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/join/hash_join_array/hash_join_array.9.adm
@@ -0,0 +1,25 @@
+{ "v1": { "id": 1, "name": "Margarita", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 5, "name": "Von", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 1, "name": "Margarita", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 11, "name": "Nicholas", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 2, "name": "Isac", "dept_ids": [ 99, 12, 14, 15, 77 ] } }
+{ "v1": { "id": 3, "name": "Emory", "dept_ids": [ 33, 11, 3, 16 ] }, "v2": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] } }
+{ "v1": { "id": 4, "name": "Nicholas", "dept_ids": [  ] }, "v2": { "id": 6, "name": "Willis", "dept_ids": [  ] } }
+{ "v1": { "id": 5, "name": "Von", "dept_ids": [ 4 ] }, "v2": { "id": 1, "name": "Margarita", "dept_ids": [ 4 ] } }
+{ "v1": { "id": 6, "name": "Willis", "dept_ids": [ 1, 5, 6 ] } }
+{ "v1": { "id": 7, "name": "Suzanna", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 12, "name": "John", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 7, "name": "Suzanna", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 14, "name": "Jay", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] }, "v2": { "id": 8, "name": "Nicole", "dept_ids": [ 33, 11, 3, 16 ] } }
+{ "v1": { "id": 9, "name": "Woodrow", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 5, "name": "Von", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 9, "name": "Woodrow", "dept_ids": [ 3, 2, 8 ] }, "v2": { "id": 11, "name": "Nicholas", "dept_ids": [ 3, 2, 8 ] } }
+{ "v1": { "id": 10, "name": "Bram", "dept_ids": [  ] }, "v2": { "id": 6, "name": "Willis", "dept_ids": [  ] } }
+{ "v1": { "id": 11, "name": "Nicholas", "dept_ids": null } }
+{ "v1": { "id": 12, "name": "John", "dept_ids": [ 2 ] }, "v2": { "id": 16, "name": "Wail", "dept_ids": [ 2 ] } }
+{ "v1": { "id": 13, "name": "Steve", "dept_ids": [ 8, 2 ] } }
+{ "v1": { "id": 14, "name": "Jay", "dept_ids": [ 4 ] }, "v2": { "id": 1, "name": "Margarita", "dept_ids": [ 4 ] } }
+{ "v1": { "id": 15, "name": "Jim", "dept_ids": null } }
+{ "v1": { "id": 16, "name": "Wail", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 12, "name": "John", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 16, "name": "Wail", "dept_ids": [ 9, 5, 2 ] }, "v2": { "id": 14, "name": "Jay", "dept_ids": [ 9, 5, 2 ] } }
+{ "v1": { "id": 17, "name": "Jim" } }
+{ "v1": { "id": 18, "name": "Kayle", "dept_ids": [ 8, 2, 1, 7, 9 ] }, "v2": { "id": 19, "name": "Mart", "dept_ids": [ 8, 2, 1, 7, 9 ] } }
+{ "v1": { "id": 19, "name": "Mart" } }
+{ "v1": { "id": 20, "name": "Mai", "dept_ids": [ 1, 5, 9 ] }, "v2": { "id": 20, "name": "Mai", "dept_ids": [ 1, 5, 9 ] } }
+{ "v1": { "id": 21, "name": "Ken", "dept_ids": [ 3, 5, 8 ] }, "v2": { "id": 21, "name": "Ken", "dept_ids": [ 3, 5, 8 ] } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
index 834b435..d3c66b6 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
@@ -3998,6 +3998,11 @@
   </test-group>
   <test-group name="group-by">
     <test-case FilePath="group-by">
+      <compilation-unit name="gby-array">
+        <output-dir compare="Text">gby-array</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="group-by">
       <compilation-unit name="core-01">
         <output-dir compare="Text">core-01</output-dir>
       </compilation-unit>
@@ -4605,6 +4610,13 @@
       </compilation-unit>
     </test-case>
   </test-group>
+  <test-group name="join">
+    <test-case FilePath="join">
+      <compilation-unit name="hash_join_array">
+        <output-dir compare="Text">hash_join_array</output-dir>
+      </compilation-unit>
+    </test-case>
+  </test-group>
   <test-group name="list">
     <test-case FilePath="list">
       <compilation-unit name="any-collection-member_01">
@@ -10323,6 +10335,11 @@
   </test-group>
   <test-group name="distinct">
     <test-case FilePath="distinct">
+      <compilation-unit name="array">
+        <output-dir compare="Text">array</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="distinct">
       <compilation-unit name="query-issue443">
         <output-dir compare="Text">query-issue443</output-dir>
       </compilation-unit>
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AbstractAGenericBinaryComparator.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AbstractAGenericBinaryComparator.java
index 9a5ae1d..b2759e8 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AbstractAGenericBinaryComparator.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AbstractAGenericBinaryComparator.java
@@ -26,7 +26,6 @@ import java.util.Comparator;
 import java.util.List;
 import java.util.PriorityQueue;
 
-import org.apache.asterix.builders.AbvsBuilderFactory;
 import org.apache.asterix.dataflow.data.common.ListAccessorUtil;
 import org.apache.asterix.om.pointables.ARecordVisitablePointable;
 import org.apache.asterix.om.pointables.PointableAllocator;
@@ -41,10 +40,9 @@ import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.om.types.TypeTagUtil;
 import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
 import org.apache.asterix.om.types.hierachy.ITypeConvertComputer;
-import org.apache.asterix.om.util.container.IObjectFactory;
 import org.apache.asterix.om.util.container.IObjectPool;
 import org.apache.asterix.om.util.container.ListObjectPool;
-import org.apache.asterix.om.utils.NonTaggedFormatUtil;
+import org.apache.asterix.om.util.container.ObjectFactories;
 import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -57,7 +55,6 @@ import org.apache.hyracks.data.std.primitive.FloatPointable;
 import org.apache.hyracks.data.std.primitive.IntegerPointable;
 import org.apache.hyracks.data.std.primitive.ShortPointable;
 import org.apache.hyracks.data.std.primitive.UTF8StringPointable;
-import org.apache.hyracks.data.std.primitive.VoidPointable;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 
 abstract class AbstractAGenericBinaryComparator implements IBinaryComparator {
@@ -115,16 +112,11 @@ abstract class AbstractAGenericBinaryComparator implements IBinaryComparator {
     // RAW
     private final IBinaryComparator rawComp = RawBinaryComparatorFactory.INSTANCE.createBinaryComparator();
 
-    // a storage to promote a value
-    private final ArrayBackedValueStorage castBuffer;
-
-    // TODO(ali): extract IObjectFactory factories into a separate unit and share
-    private static final IObjectFactory<IPointable, Void> VOID_FACTORY = (type) -> new VoidPointable();
-    private static final IObjectFactory<IMutableValueStorage, ATypeTag> STORAGE_FACTORY = new AbvsBuilderFactory();
-
     // these fields can be null
     protected final IAType leftType;
     protected final IAType rightType;
+    // a storage to promote a value
+    private final ArrayBackedValueStorage castBuffer;
     private final IObjectPool<IMutableValueStorage, ATypeTag> storageAllocator;
     private final IObjectPool<IPointable, Void> voidPointableAllocator;
     // used for record comparison, sorting field names
@@ -137,8 +129,8 @@ abstract class AbstractAGenericBinaryComparator implements IBinaryComparator {
         this.leftType = leftType;
         this.rightType = rightType;
         this.castBuffer = new ArrayBackedValueStorage();
-        this.storageAllocator = new ListObjectPool<>(STORAGE_FACTORY);
-        this.voidPointableAllocator = new ListObjectPool<>(VOID_FACTORY);
+        this.storageAllocator = new ListObjectPool<>(ObjectFactories.STORAGE_FACTORY);
+        this.voidPointableAllocator = new ListObjectPool<>(ObjectFactories.VOID_FACTORY);
         this.recordAllocator = new PointableAllocator();
         this.fieldNamesComparator = createFieldNamesComp(ascStrComp);
         this.heapAllocator = new ListObjectPool<>((type) -> new PriorityQueue<>(fieldNamesComparator));
@@ -360,8 +352,8 @@ abstract class AbstractAGenericBinaryComparator implements IBinaryComparator {
         }
         int leftNumItems = ListAccessorUtil.numberOfItems(b1, s1);
         int rightNumItems = ListAccessorUtil.numberOfItems(b2, s2);
-        IAType leftArrayType = getActualTypeOrOpen(leftType, ATypeTag.ARRAY);
-        IAType rightArrayType = getActualTypeOrOpen(rightType, ATypeTag.ARRAY);
+        IAType leftArrayType = TypeComputeUtils.getActualTypeOrOpen(leftType, ATypeTag.ARRAY);
+        IAType rightArrayType = TypeComputeUtils.getActualTypeOrOpen(rightType, ATypeTag.ARRAY);
         IAType leftItemType = ((AbstractCollectionType) leftArrayType).getItemType();
         IAType rightItemType = ((AbstractCollectionType) rightArrayType).getItemType();
         ATypeTag leftItemTag = leftItemType.getTypeTag();
@@ -399,8 +391,8 @@ abstract class AbstractAGenericBinaryComparator implements IBinaryComparator {
         if (leftType == null || rightType == null) {
             return rawComp.compare(b1, s1, l1, b2, s2, l2);
         }
-        ARecordType leftRecordType = (ARecordType) getActualTypeOrOpen(leftType, ATypeTag.OBJECT);
-        ARecordType rightRecordType = (ARecordType) getActualTypeOrOpen(rightType, ATypeTag.OBJECT);
+        ARecordType leftRecordType = (ARecordType) TypeComputeUtils.getActualTypeOrOpen(leftType, ATypeTag.OBJECT);
+        ARecordType rightRecordType = (ARecordType) TypeComputeUtils.getActualTypeOrOpen(rightType, ATypeTag.OBJECT);
         ARecordVisitablePointable leftRecord = recordAllocator.allocateRecordValue(leftRecordType);
         ARecordVisitablePointable rightRecord = recordAllocator.allocateRecordValue(rightRecordType);
         PriorityQueue<IVisitablePointable> leftNamesHeap = null, rightNamesHeap = null;
@@ -467,11 +459,6 @@ abstract class AbstractAGenericBinaryComparator implements IBinaryComparator {
         }
     }
 
-    private static IAType getActualTypeOrOpen(IAType type, ATypeTag tag) {
-        IAType actualType = TypeComputeUtils.getActualType(type);
-        return actualType.getTypeTag() == ATypeTag.ANY ? DefaultOpenFieldType.getDefaultOpenFieldType(tag) : actualType;
-    }
-
     private static int addToHeap(List<IVisitablePointable> recordFNames, List<IVisitablePointable> recordFValues,
             PriorityQueue<IVisitablePointable> names) {
         // do not add fields whose value is missing, they don't exist in reality
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/hash/AMurmurHash3BinaryHashFunctionFamily.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/hash/AMurmurHash3BinaryHashFunctionFamily.java
index 5f5ff76..0f0eb11 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/hash/AMurmurHash3BinaryHashFunctionFamily.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/hash/AMurmurHash3BinaryHashFunctionFamily.java
@@ -21,88 +21,137 @@ package org.apache.asterix.dataflow.data.nontagged.hash;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.asterix.dataflow.data.common.ListAccessorUtil;
+import org.apache.asterix.om.typecomputer.impl.TypeComputeUtils;
 import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.AbstractCollectionType;
 import org.apache.asterix.om.types.EnumDeserializer;
+import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.om.types.hierachy.FloatToDoubleTypeConvertComputer;
 import org.apache.asterix.om.types.hierachy.IntegerToDoubleTypeConvertComputer;
+import org.apache.asterix.om.util.container.IObjectPool;
+import org.apache.asterix.om.util.container.ListObjectPool;
+import org.apache.asterix.om.util.container.ObjectFactories;
 import org.apache.hyracks.api.dataflow.value.IBinaryHashFunction;
 import org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.accessors.MurmurHash3BinaryHash;
+import org.apache.hyracks.data.std.api.IMutableValueStorage;
+import org.apache.hyracks.data.std.api.IPointable;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 
 public class AMurmurHash3BinaryHashFunctionFamily implements IBinaryHashFunctionFamily {
 
-    public static final IBinaryHashFunctionFamily INSTANCE = new AMurmurHash3BinaryHashFunctionFamily();
-
     private static final long serialVersionUID = 1L;
+    private final IAType type;
 
-    private AMurmurHash3BinaryHashFunctionFamily() {
+    public AMurmurHash3BinaryHashFunctionFamily(IAType type) {
+        this.type = type;
     }
 
-    // This hash function family is used to promote a numeric type to a DOUBLE numeric type
-    // to return same hash value for the original numeric value, regardless of the numeric type.
-    // (e.g., h( int64("1") )  =  h( double("1.0") )
+    public static IBinaryHashFunction createBinaryHashFunction(IAType type, int seed) {
+        return new GenericHashFunction(type, seed);
+    }
 
+    /**
+     * The returned hash function is used to promote a numeric type to a DOUBLE numeric type to return the same hash
+     * value for the original numeric value, regardless of the numeric type. (e.g., h( int64("1") )  =  h( double("1.0") ))
+     *
+     * @param seed seed to be used by the hash function created
+     *
+     * @return a generic hash function
+     */
     @Override
     public IBinaryHashFunction createBinaryHashFunction(final int seed) {
-        return new IBinaryHashFunction() {
-
-            private ArrayBackedValueStorage fieldValueBuffer = new ArrayBackedValueStorage();
-            private DataOutput fieldValueBufferOutput = fieldValueBuffer.getDataOutput();
-            private ATypeTag sourceTag = null;
-            private boolean numericTypePromotionApplied = false;
-
-            @Override
-            public int hash(byte[] bytes, int offset, int length) throws HyracksDataException {
-
-                // If a numeric type is encountered, then we promote each numeric type to the DOUBLE type.
-                fieldValueBuffer.reset();
-                sourceTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[offset]);
-
-                switch (sourceTag) {
-                    case TINYINT:
-                    case SMALLINT:
-                    case INTEGER:
-                    case BIGINT:
-                        try {
-                            IntegerToDoubleTypeConvertComputer.getInstance().convertType(bytes, offset + 1, length - 1,
-                                    fieldValueBufferOutput);
-                        } catch (IOException e) {
-                            throw new HyracksDataException(
-                                    "A numeric type promotion error has occurred before doing hash(). Can't continue process. Detailed Error message:"
-                                            + e.getMessage());
-                        }
-                        numericTypePromotionApplied = true;
-                        break;
-
-                    case FLOAT:
-                        try {
-                            FloatToDoubleTypeConvertComputer.getInstance().convertType(bytes, offset + 1, length - 1,
-                                    fieldValueBufferOutput);
-                        } catch (IOException e) {
-                            throw new HyracksDataException(
-                                    "A numeric type promotion error has occurred before doing hash(). Can't continue process. Detailed Error message:"
-                                            + e.getMessage());
-                        }
-                        numericTypePromotionApplied = true;
-                        break;
-
-                    default:
-                        numericTypePromotionApplied = false;
-                        break;
-                }
-
-                // If a numeric type promotion happened
-                if (numericTypePromotionApplied) {
-                    return MurmurHash3BinaryHash.hash(fieldValueBuffer.getByteArray(),
-                            fieldValueBuffer.getStartOffset(), fieldValueBuffer.getLength(), seed);
+        return new GenericHashFunction(type, seed);
+    }
 
-                } else {
-                    // Usual case for non numeric types and the DOBULE numeric type
+    private static final class GenericHashFunction implements IBinaryHashFunction {
+
+        private final ArrayBackedValueStorage valueBuffer = new ArrayBackedValueStorage();
+        private final DataOutput valueOut = valueBuffer.getDataOutput();
+        private final IObjectPool<IPointable, Void> voidPointableAllocator;
+        private final IObjectPool<IMutableValueStorage, ATypeTag> storageAllocator;
+        private final IAType type;
+        private final int seed;
+
+        private GenericHashFunction(IAType type, int seed) {
+            this.type = type;
+            this.seed = seed;
+            this.voidPointableAllocator = new ListObjectPool<>(ObjectFactories.VOID_FACTORY);
+            this.storageAllocator = new ListObjectPool<>(ObjectFactories.STORAGE_FACTORY);
+        }
+
+        @Override
+        public int hash(byte[] bytes, int offset, int length) throws HyracksDataException {
+            return hash(type, bytes, offset, length);
+        }
+
+        private int hash(IAType type, byte[] bytes, int offset, int length) throws HyracksDataException {
+            // if a numeric type is encountered, then we promote each numeric type to the DOUBLE type.
+            valueBuffer.reset();
+            ATypeTag sourceTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[offset]);
+
+            switch (sourceTag) {
+                case TINYINT:
+                case SMALLINT:
+                case INTEGER:
+                case BIGINT:
+                    try {
+                        IntegerToDoubleTypeConvertComputer.getInstance().convertType(bytes, offset + 1, length - 1,
+                                valueOut);
+                    } catch (IOException e) {
+                        throw HyracksDataException.create(ErrorCode.NUMERIC_PROMOTION_ERROR, e.getMessage());
+                    }
+                    return MurmurHash3BinaryHash.hash(valueBuffer.getByteArray(), valueBuffer.getStartOffset(),
+                            valueBuffer.getLength(), seed);
+
+                case FLOAT:
+                    try {
+                        FloatToDoubleTypeConvertComputer.getInstance().convertType(bytes, offset + 1, length - 1,
+                                valueOut);
+                    } catch (IOException e) {
+                        throw HyracksDataException.create(ErrorCode.NUMERIC_PROMOTION_ERROR, e.getMessage());
+                    }
+                    return MurmurHash3BinaryHash.hash(valueBuffer.getByteArray(), valueBuffer.getStartOffset(),
+                            valueBuffer.getLength(), seed);
+
+                case DOUBLE:
                     return MurmurHash3BinaryHash.hash(bytes, offset, length, seed);
+                case ARRAY:
+                    try {
+                        return hashArray(type, bytes, offset, length, seed);
+                    } catch (IOException e) {
+                        throw HyracksDataException.create(e);
+                    }
+                default:
+                    return MurmurHash3BinaryHash.hash(bytes, offset, length, seed);
+            }
+        }
+
+        private int hashArray(IAType type, byte[] bytes, int offset, int length, int seed) throws IOException {
+            if (type == null) {
+                return MurmurHash3BinaryHash.hash(bytes, offset, length, seed);
+            }
+            IAType arrayType = TypeComputeUtils.getActualTypeOrOpen(type, ATypeTag.ARRAY);
+            IAType itemType = ((AbstractCollectionType) arrayType).getItemType();
+            ATypeTag itemTag = itemType.getTypeTag();
+            int numItems = ListAccessorUtil.numberOfItems(bytes, offset);
+            int hash = 0;
+            IPointable item = voidPointableAllocator.allocate(null);
+            ArrayBackedValueStorage storage = (ArrayBackedValueStorage) storageAllocator.allocate(null);
+            try {
+                for (int i = 0; i < numItems; i++) {
+                    ListAccessorUtil.getItem(bytes, offset, i, ATypeTag.ARRAY, itemTag, item, storage);
+                    hash ^= hash(itemType, item.getByteArray(), item.getStartOffset(), item.getLength());
                 }
+            } finally {
+                voidPointableAllocator.free(item);
+                storageAllocator.free(storage);
             }
-        };
+
+            return hash;
+        }
     }
 }
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryHashFunctionFactoryProvider.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryHashFunctionFactoryProvider.java
index 6365860..c3cbae3 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryHashFunctionFactoryProvider.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryHashFunctionFactoryProvider.java
@@ -21,6 +21,7 @@ package org.apache.asterix.formats.nontagged;
 import java.io.Serializable;
 
 import org.apache.asterix.dataflow.data.nontagged.hash.AMurmurHash3BinaryHashFunctionFamily;
+import org.apache.asterix.om.types.IAType;
 import org.apache.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
 import org.apache.hyracks.api.dataflow.value.IBinaryHashFunction;
 import org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
@@ -45,8 +46,8 @@ public class BinaryHashFunctionFactoryProvider implements IBinaryHashFunctionFac
 
             @Override
             public IBinaryHashFunction createBinaryHashFunction() {
-                // Actual numeric type promotion happens in the createBinaryHashFunction()
-                return AMurmurHash3BinaryHashFunctionFamily.INSTANCE.createBinaryHashFunction(0);
+                // actual numeric type promotion happens in the createBinaryHashFunction()
+                return AMurmurHash3BinaryHashFunctionFamily.createBinaryHashFunction((IAType) type, 0);
             }
         };
     }
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryHashFunctionFamilyProvider.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryHashFunctionFamilyProvider.java
index 761d1a3..abe5b20 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryHashFunctionFamilyProvider.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryHashFunctionFamilyProvider.java
@@ -16,19 +16,17 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-
 package org.apache.asterix.formats.nontagged;
 
 import java.io.Serializable;
 
 import org.apache.asterix.dataflow.data.nontagged.hash.AMurmurHash3BinaryHashFunctionFamily;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.asterix.om.types.IAType;
 import org.apache.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
 import org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
 
 /**
- * We use a binary hash function that promotes numeric types (tinyint,smallint,integer,bigint,float) to double
- * if requested.
+ * We use a binary hash function that promotes numeric types (tinyint,smallint,integer,bigint,float) to double.
  * Non-numeric types will be hashed without type promotion.
  */
 public class BinaryHashFunctionFamilyProvider implements IBinaryHashFunctionFamilyProvider, Serializable {
@@ -37,13 +35,11 @@ public class BinaryHashFunctionFamilyProvider implements IBinaryHashFunctionFami
     public static final BinaryHashFunctionFamilyProvider INSTANCE = new BinaryHashFunctionFamilyProvider();
 
     private BinaryHashFunctionFamilyProvider() {
-
     }
 
     @Override
-    public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type) throws AlgebricksException {
+    public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type) {
         // AMurmurHash3BinaryHashFunctionFamily converts numeric type to double type before doing hash()
-        return AMurmurHash3BinaryHashFunctionFamily.INSTANCE;
+        return new AMurmurHash3BinaryHashFunctionFamily((IAType) type);
     }
-
 }
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/TypeComputeUtils.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/TypeComputeUtils.java
index e40975b..4330767 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/TypeComputeUtils.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/TypeComputeUtils.java
@@ -20,6 +20,7 @@ package org.apache.asterix.om.typecomputer.impl;
 
 import java.util.List;
 
+import org.apache.asterix.om.pointables.base.DefaultOpenFieldType;
 import org.apache.asterix.om.types.AOrderedListType;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
@@ -236,4 +237,9 @@ public class TypeComputeUtils {
         return null;
     }
 
+    // This is for complex types; it returns null when asked for a default open type for a primitive tag.
+    public static IAType getActualTypeOrOpen(IAType type, ATypeTag tag) {
+        IAType actualType = TypeComputeUtils.getActualType(type);
+        return actualType.getTypeTag() == ATypeTag.ANY ? DefaultOpenFieldType.getDefaultOpenFieldType(tag) : actualType;
+    }
 }
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/container/IObjectFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/container/IObjectFactory.java
index 48dcbfb..d3adca2 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/container/IObjectFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/container/IObjectFactory.java
@@ -16,19 +16,20 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-
 package org.apache.asterix.om.util.container;
 
 /**
  * A factory interface to create objects.
  */
+@FunctionalInterface
 public interface IObjectFactory<E, T> {
 
     /**
-     * create an element of type E
+     * Creates an element of type E.
+     *
+     * @param arg additional argument to create the proper element.
      *
-     * @param arg
-     * @return an E element
+     * @return an E element.
      */
-    public E create(T arg);
+    E create(T arg);
 }
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/container/IObjectFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/container/ObjectFactories.java
similarity index 55%
copy from asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/container/IObjectFactory.java
copy to asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/container/ObjectFactories.java
index 48dcbfb..e9cb345 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/container/IObjectFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/container/ObjectFactories.java
@@ -16,19 +16,21 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-
 package org.apache.asterix.om.util.container;
 
-/**
- * A factory interface to create objects.
- */
-public interface IObjectFactory<E, T> {
+import org.apache.asterix.builders.AbvsBuilderFactory;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.hyracks.data.std.api.IMutableValueStorage;
+import org.apache.hyracks.data.std.api.IPointable;
+import org.apache.hyracks.data.std.primitive.VoidPointable;
+
+// TODO(ali): look for all classes creating factories and extract them to here
+public class ObjectFactories {
+
+    private ObjectFactories() {
+    }
 
-    /**
-     * create an element of type E
-     *
-     * @param arg
-     * @return an E element
-     */
-    public E create(T arg);
+    public static final IObjectFactory<IPointable, Void> VOID_FACTORY = (type) -> new VoidPointable();
+    // TODO(ali): use lambda for the storage, too
+    public static final IObjectFactory<IMutableValueStorage, ATypeTag> STORAGE_FACTORY = new AbvsBuilderFactory();
 }
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
index 6d8cf30..6d8b6e3 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
@@ -114,8 +114,10 @@ public class HybridHashJoinPOperator extends AbstractHashJoinPOperator {
         int[] keysLeft = JobGenHelper.variablesToFieldIndexes(keysLeftBranch, inputSchemas[0]);
         int[] keysRight = JobGenHelper.variablesToFieldIndexes(keysRightBranch, inputSchemas[1]);
         IVariableTypeEnvironment env = context.getTypeEnvironment(op);
-        IBinaryHashFunctionFamily[] hashFunFamilies =
+        IBinaryHashFunctionFamily[] leftHashFunFamilies =
                 JobGenHelper.variablesToBinaryHashFunctionFamilies(keysLeftBranch, env, context);
+        IBinaryHashFunctionFamily[] rightHashFunFamilies =
+                JobGenHelper.variablesToBinaryHashFunctionFamilies(keysRightBranch, env, context);
         IBinaryComparatorFactory[] leftCompFactories = new IBinaryComparatorFactory[keysLeft.length];
         IBinaryComparatorFactory[] rightCompFactories = new IBinaryComparatorFactory[keysRight.length];
         IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
@@ -138,8 +140,8 @@ public class HybridHashJoinPOperator extends AbstractHashJoinPOperator {
         IOperatorDescriptorRegistry spec = builder.getJobSpec();
         IOperatorDescriptor opDesc;
 
-        opDesc = generateOptimizedHashJoinRuntime(context, inputSchemas, keysLeft, keysRight, hashFunFamilies,
-                leftCompFactories, rightCompFactories, predEvaluatorFactory, recDescriptor, spec);
+        opDesc = generateOptimizedHashJoinRuntime(context, inputSchemas, keysLeft, keysRight, leftHashFunFamilies,
+                rightHashFunFamilies, leftCompFactories, rightCompFactories, predEvaluatorFactory, recDescriptor, spec);
         opDesc.setSourceLocation(op.getSourceLocation());
         contributeOpDesc(builder, (AbstractLogicalOperator) op, opDesc);
 
@@ -150,15 +152,15 @@ public class HybridHashJoinPOperator extends AbstractHashJoinPOperator {
     }
 
     private IOperatorDescriptor generateOptimizedHashJoinRuntime(JobGenContext context, IOperatorSchema[] inputSchemas,
-            int[] keysLeft, int[] keysRight, IBinaryHashFunctionFamily[] hashFunFamilies,
-            IBinaryComparatorFactory[] leftCompFactories, IBinaryComparatorFactory[] rightCompFactories,
-            IPredicateEvaluatorFactory predEvaluatorFactory, RecordDescriptor recDescriptor,
-            IOperatorDescriptorRegistry spec) {
+            int[] keysLeft, int[] keysRight, IBinaryHashFunctionFamily[] leftHashFunFamilies,
+            IBinaryHashFunctionFamily[] rightHashFunFamilies, IBinaryComparatorFactory[] leftCompFactories,
+            IBinaryComparatorFactory[] rightCompFactories, IPredicateEvaluatorFactory predEvaluatorFactory,
+            RecordDescriptor recDescriptor, IOperatorDescriptorRegistry spec) {
         switch (kind) {
             case INNER:
                 return new OptimizedHybridHashJoinOperatorDescriptor(spec, getMemSizeInFrames(),
-                        maxInputBuildSizeInFrames, getFudgeFactor(), keysLeft, keysRight, hashFunFamilies,
-                        leftCompFactories, rightCompFactories, recDescriptor,
+                        maxInputBuildSizeInFrames, getFudgeFactor(), keysLeft, keysRight, leftHashFunFamilies,
+                        rightHashFunFamilies, leftCompFactories, rightCompFactories, recDescriptor,
                         new JoinMultiComparatorFactory(leftCompFactories, keysLeft, keysRight),
                         new JoinMultiComparatorFactory(rightCompFactories, keysRight, keysLeft), predEvaluatorFactory);
             case LEFT_OUTER:
@@ -167,8 +169,8 @@ public class HybridHashJoinPOperator extends AbstractHashJoinPOperator {
                     nonMatchWriterFactories[j] = context.getMissingWriterFactory();
                 }
                 return new OptimizedHybridHashJoinOperatorDescriptor(spec, getMemSizeInFrames(),
-                        maxInputBuildSizeInFrames, getFudgeFactor(), keysLeft, keysRight, hashFunFamilies,
-                        leftCompFactories, rightCompFactories, recDescriptor,
+                        maxInputBuildSizeInFrames, getFudgeFactor(), keysLeft, keysRight, leftHashFunFamilies,
+                        rightHashFunFamilies, leftCompFactories, rightCompFactories, recDescriptor,
                         new JoinMultiComparatorFactory(leftCompFactories, keysLeft, keysRight),
                         new JoinMultiComparatorFactory(rightCompFactories, keysRight, keysLeft), predEvaluatorFactory,
                         true, nonMatchWriterFactories);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryHashJoinPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryHashJoinPOperator.java
index 58e10ba..152fcc6 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryHashJoinPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryHashJoinPOperator.java
@@ -88,8 +88,10 @@ public class InMemoryHashJoinPOperator extends AbstractHashJoinPOperator {
         int[] keysLeft = JobGenHelper.variablesToFieldIndexes(keysLeftBranch, inputSchemas[0]);
         int[] keysRight = JobGenHelper.variablesToFieldIndexes(keysRightBranch, inputSchemas[1]);
         IVariableTypeEnvironment env = context.getTypeEnvironment(op);
-        IBinaryHashFunctionFactory[] hashFunFactories =
+        IBinaryHashFunctionFactory[] leftHashFunFactories =
                 JobGenHelper.variablesToBinaryHashFunctionFactories(keysLeftBranch, env, context);
+        IBinaryHashFunctionFactory[] rightHashFunFactories =
+                JobGenHelper.variablesToBinaryHashFunctionFactories(keysRightBranch, env, context);
         IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[keysLeft.length];
         IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
         Object leftType;
@@ -112,17 +114,18 @@ public class InMemoryHashJoinPOperator extends AbstractHashJoinPOperator {
 
         switch (kind) {
             case INNER:
-                opDesc = new InMemoryHashJoinOperatorDescriptor(spec, keysLeft, keysRight, hashFunFactories,
-                        comparatorFactories, recDescriptor, tableSize, predEvaluatorFactory, memSizeInFrames);
+                opDesc = new InMemoryHashJoinOperatorDescriptor(spec, keysLeft, keysRight, leftHashFunFactories,
+                        rightHashFunFactories, comparatorFactories, recDescriptor, tableSize, predEvaluatorFactory,
+                        memSizeInFrames);
                 break;
             case LEFT_OUTER:
                 IMissingWriterFactory[] nonMatchWriterFactories = new IMissingWriterFactory[inputSchemas[1].getSize()];
                 for (int j = 0; j < nonMatchWriterFactories.length; j++) {
                     nonMatchWriterFactories[j] = context.getMissingWriterFactory();
                 }
-                opDesc = new InMemoryHashJoinOperatorDescriptor(spec, keysLeft, keysRight, hashFunFactories,
-                        comparatorFactories, predEvaluatorFactory, recDescriptor, true, nonMatchWriterFactories,
-                        tableSize, memSizeInFrames);
+                opDesc = new InMemoryHashJoinOperatorDescriptor(spec, keysLeft, keysRight, leftHashFunFactories,
+                        rightHashFunFactories, comparatorFactories, predEvaluatorFactory, recDescriptor, true,
+                        nonMatchWriterFactories, tableSize, memSizeInFrames);
                 break;
             default:
                 throw new NotImplementedException();
diff --git a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFactoryProvider.java b/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFactoryProvider.java
index d853f83..d6e808c 100644
--- a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFactoryProvider.java
+++ b/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFactoryProvider.java
@@ -18,9 +18,22 @@
  */
 package org.apache.hyracks.algebricks.data;
 
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 
+/**
+ * Ideally, {@code IBinaryHashFunctionFactoryProvider} should be stateless and thread-safe. Also, it should be made into
+ * a singleton. However, this is implementation-dependent.
+ */
 public interface IBinaryHashFunctionFactoryProvider {
-    public IBinaryHashFunctionFactory getBinaryHashFunctionFactory(Object type) throws AlgebricksException;
+
+    /**
+     * Whether a singleton factory instance is returned or a new factory instance is created is implementation-specific.
+     * Therefore, no assumption should be made in this regard.
+     * TODO: some existing implementations create a new factory instance
+     *
+     * @param type the type of the data that will be hashed.
+     *
+     * @return a {@link IBinaryHashFunctionFactory} instance.
+     */
+    IBinaryHashFunctionFactory getBinaryHashFunctionFactory(Object type);
 }
diff --git a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java b/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
index 93dd3d5..322bbcb 100644
--- a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
+++ b/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
@@ -16,13 +16,24 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-
 package org.apache.hyracks.algebricks.data;
 
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
 
+/**
+ * Ideally, {@code IBinaryHashFunctionFamilyProvider} should be stateless and thread-safe. Also, it should be made into
+ * a singleton. However, this is implementation-dependent.
+ */
 public interface IBinaryHashFunctionFamilyProvider {
 
-    public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type) throws AlgebricksException;
+    /**
+     * Whether a singleton factory instance is returned or a new factory instance is created is implementation-specific.
+     * Therefore, no assumption should be made in this regard.
+     * TODO: some existing implementations create a new factory instance
+     *
+     * @param type the type of the data that will be hashed.
+     *
+     * @return a {@link IBinaryHashFunctionFamily} instance.
+     */
+    IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type);
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunction.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunction.java
index 8205497..8d45cf6 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunction.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunction.java
@@ -20,6 +20,12 @@ package org.apache.hyracks.api.dataflow.value;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
+/**
+ * Ideally, {@code IBinaryHashFunction} should be stateless and thread-safe. Also, it should be made into a singleton.
+ * However, this is implementation-dependent.
+ * TODO: current implementations are stateful and are not thread-safe.
+ */
 public interface IBinaryHashFunction {
+
     int hash(byte[] bytes, int offset, int length) throws HyracksDataException;
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
index e512416..c8681ef 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
@@ -20,6 +20,18 @@ package org.apache.hyracks.api.dataflow.value;
 
 import java.io.Serializable;
 
+/**
+ * Ideally, {@code IBinaryHashFunctionFactory} should be stateless and thread-safe. Also, it should be made into
+ * a singleton. However, this is implementation-dependent.
+ * TODO: some existing implementations are not singleton
+ */
 public interface IBinaryHashFunctionFactory extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction();
+
+    /**
+     * Whether a singleton hash function instance is returned or a new hash function instance is created is
+     * implementation-specific. Therefore, no assumption should be made in this regard.
+     *
+     * @return a {@link IBinaryHashFunction} instance.
+     */
+    IBinaryHashFunction createBinaryHashFunction();
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFamily.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFamily.java
index 35fea9f..b22e75a 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFamily.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/IBinaryHashFunctionFamily.java
@@ -20,6 +20,20 @@ package org.apache.hyracks.api.dataflow.value;
 
 import java.io.Serializable;
 
+/**
+ * Ideally, {@code IBinaryHashFunctionFamily} should be stateless and thread-safe. Also, it should be made into
+ * a singleton. However, this is implementation-dependent.
+ * TODO: some existing implementations are not singleton and are stateful
+ */
 public interface IBinaryHashFunctionFamily extends Serializable {
-    public IBinaryHashFunction createBinaryHashFunction(int seed);
+
+    /**
+     * Whether a singleton hash function instance is returned or a new hash function instance is created is
+     * implementation-specific. Therefore, no assumption should be made in this regard.
+     *
+     * @param seed seed to be used by the hash function created
+     *
+     * @return a {@link IBinaryHashFunction} instance.
+     */
+    IBinaryHashFunction createBinaryHashFunction(int seed);
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
index 4ff0df3..bf34664 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
@@ -154,6 +154,7 @@ public class ErrorCode {
     public static final int RANGEMAP_NOT_FOUND = 118;
     public static final int UNSUPPORTED_WINDOW_SPEC = 119;
     public static final int EOF = 120;
+    public static final int NUMERIC_PROMOTION_ERROR = 121;
 
     // Compilation error codes.
     public static final int RULECOLLECTION_NOT_INSTANCE_OF_LIST = 10000;
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties b/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
index 5c9863c..b4f7973 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
@@ -137,6 +137,7 @@
 118 = Range map was not found for parallel sort
 119 = Unsupported window specification: PARTITION BY %1$s, ORDER BY %2$s
 120 = End of file
+121 = A numeric type promotion error has occurred: %1$s
 
 10000 = The given rule collection %1$s is not an instance of the List class.
 10001 = Cannot compose partition constraint %1$s with %2$s
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
index a5c17f2..9ef36cf 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
@@ -61,7 +61,8 @@ public class InMemoryHashJoinOperatorDescriptor extends AbstractOperatorDescript
     private static final long serialVersionUID = 1L;
     private final int[] keys0;
     private final int[] keys1;
-    private final IBinaryHashFunctionFactory[] hashFunctionFactories;
+    private final IBinaryHashFunctionFactory[] hashFunctionFactories0;
+    private final IBinaryHashFunctionFactory[] hashFunctionFactories1;
     private final IBinaryComparatorFactory[] comparatorFactories;
     private final IPredicateEvaluatorFactory predEvaluatorFactory;
     private final boolean isLeftOuter;
@@ -71,13 +72,14 @@ public class InMemoryHashJoinOperatorDescriptor extends AbstractOperatorDescript
     private final int memSizeInFrames;
 
     public InMemoryHashJoinOperatorDescriptor(IOperatorDescriptorRegistry spec, int[] keys0, int[] keys1,
-            IBinaryHashFunctionFactory[] hashFunctionFactories, IBinaryComparatorFactory[] comparatorFactories,
-            RecordDescriptor recordDescriptor, int tableSize, IPredicateEvaluatorFactory predEvalFactory,
-            int memSizeInFrames) {
+            IBinaryHashFunctionFactory[] hashFunctionFactories0, IBinaryHashFunctionFactory[] hashFunctionFactories1,
+            IBinaryComparatorFactory[] comparatorFactories, RecordDescriptor recordDescriptor, int tableSize,
+            IPredicateEvaluatorFactory predEvalFactory, int memSizeInFrames) {
         super(spec, 2, 1);
         this.keys0 = keys0;
         this.keys1 = keys1;
-        this.hashFunctionFactories = hashFunctionFactories;
+        this.hashFunctionFactories0 = hashFunctionFactories0;
+        this.hashFunctionFactories1 = hashFunctionFactories1;
         this.comparatorFactories = comparatorFactories;
         this.predEvaluatorFactory = predEvalFactory;
         outRecDescs[0] = recordDescriptor;
@@ -88,13 +90,15 @@ public class InMemoryHashJoinOperatorDescriptor extends AbstractOperatorDescript
     }
 
     public InMemoryHashJoinOperatorDescriptor(IOperatorDescriptorRegistry spec, int[] keys0, int[] keys1,
-            IBinaryHashFunctionFactory[] hashFunctionFactories, IBinaryComparatorFactory[] comparatorFactories,
-            IPredicateEvaluatorFactory predEvalFactory, RecordDescriptor recordDescriptor, boolean isLeftOuter,
-            IMissingWriterFactory[] missingWriterFactories1, int tableSize, int memSizeInFrames) {
+            IBinaryHashFunctionFactory[] hashFunctionFactories0, IBinaryHashFunctionFactory[] hashFunctionFactories1,
+            IBinaryComparatorFactory[] comparatorFactories, IPredicateEvaluatorFactory predEvalFactory,
+            RecordDescriptor recordDescriptor, boolean isLeftOuter, IMissingWriterFactory[] missingWriterFactories1,
+            int tableSize, int memSizeInFrames) {
         super(spec, 2, 1);
         this.keys0 = keys0;
         this.keys1 = keys1;
-        this.hashFunctionFactories = hashFunctionFactories;
+        this.hashFunctionFactories0 = hashFunctionFactories0;
+        this.hashFunctionFactories1 = hashFunctionFactories1;
         this.comparatorFactories = comparatorFactories;
         this.predEvaluatorFactory = predEvalFactory;
         outRecDescs[0] = recordDescriptor;
@@ -182,9 +186,9 @@ public class InMemoryHashJoinOperatorDescriptor extends AbstractOperatorDescript
                 @Override
                 public void open() throws HyracksDataException {
                     ITuplePartitionComputer hpc0 =
-                            new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories).createPartitioner(ctx);
+                            new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories0).createPartitioner(ctx);
                     ITuplePartitionComputer hpc1 =
-                            new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories).createPartitioner(ctx);
+                            new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories1).createPartitioner(ctx);
                     state = new HashBuildTaskState(ctx.getJobletContext().getJobId(),
                             new TaskId(getActivityId(), partition));
                     ISerializableTable table = new SerializableHashTable(tableSize, ctx, bufferManager);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
index 2452ae5..403c492 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
@@ -123,7 +123,8 @@ public class OptimizedHybridHashJoinOperatorDescriptor extends AbstractOperatorD
     private final double fudgeFactor;
     private final int[] probeKeys;
     private final int[] buildKeys;
-    private final IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories;
+    private final IBinaryHashFunctionFamily[] propHashFunctionFactories;
+    private final IBinaryHashFunctionFamily[] buildHashFunctionFactories;
     private final IBinaryComparatorFactory[] probCompFactories; //For in-mem HJ
     private final IBinaryComparatorFactory[] buildCompFactories; //For in-mem HJ
     private final ITuplePairComparatorFactory tuplePairComparatorFactoryProbe2Build; //For NLJ in probe
@@ -142,7 +143,8 @@ public class OptimizedHybridHashJoinOperatorDescriptor extends AbstractOperatorD
 
     public OptimizedHybridHashJoinOperatorDescriptor(IOperatorDescriptorRegistry spec, int memSizeInFrames,
             int inputsize0, double factor, int[] keys0, int[] keys1,
-            IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories, IBinaryComparatorFactory[] probCompFactories,
+            IBinaryHashFunctionFamily[] propHashFunctionFactories,
+            IBinaryHashFunctionFamily[] buildHashFunctionFactories, IBinaryComparatorFactory[] probCompFactories,
             IBinaryComparatorFactory[] buildCompFactories, RecordDescriptor recordDescriptor,
             ITuplePairComparatorFactory tupPaircomparatorFactory01,
             ITuplePairComparatorFactory tupPaircomparatorFactory10, IPredicateEvaluatorFactory predEvaluatorFactory,
@@ -153,7 +155,8 @@ public class OptimizedHybridHashJoinOperatorDescriptor extends AbstractOperatorD
         this.fudgeFactor = factor;
         this.probeKeys = keys0;
         this.buildKeys = keys1;
-        this.hashFunctionGeneratorFactories = hashFunctionGeneratorFactories;
+        this.propHashFunctionFactories = propHashFunctionFactories;
+        this.buildHashFunctionFactories = buildHashFunctionFactories;
         this.probCompFactories = probCompFactories;
         this.buildCompFactories = buildCompFactories;
         this.tuplePairComparatorFactoryProbe2Build = tupPaircomparatorFactory01;
@@ -166,13 +169,14 @@ public class OptimizedHybridHashJoinOperatorDescriptor extends AbstractOperatorD
 
     public OptimizedHybridHashJoinOperatorDescriptor(IOperatorDescriptorRegistry spec, int memSizeInFrames,
             int inputsize0, double factor, int[] keys0, int[] keys1,
-            IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories, IBinaryComparatorFactory[] probCompFactories,
+            IBinaryHashFunctionFamily[] propHashFunctionFactories,
+            IBinaryHashFunctionFamily[] buildHashFunctionFactories, IBinaryComparatorFactory[] probCompFactories,
             IBinaryComparatorFactory[] buildCompFactories, RecordDescriptor recordDescriptor,
             ITuplePairComparatorFactory tupPaircomparatorFactory01,
             ITuplePairComparatorFactory tupPaircomparatorFactory10, IPredicateEvaluatorFactory predEvaluatorFactory) {
-        this(spec, memSizeInFrames, inputsize0, factor, keys0, keys1, hashFunctionGeneratorFactories, probCompFactories,
-                buildCompFactories, recordDescriptor, tupPaircomparatorFactory01, tupPaircomparatorFactory10,
-                predEvaluatorFactory, false, null);
+        this(spec, memSizeInFrames, inputsize0, factor, keys0, keys1, propHashFunctionFactories,
+                buildHashFunctionFactories, probCompFactories, buildCompFactories, recordDescriptor,
+                tupPaircomparatorFactory01, tupPaircomparatorFactory10, predEvaluatorFactory, false, null);
     }
 
     @Override
@@ -279,10 +283,9 @@ public class OptimizedHybridHashJoinOperatorDescriptor extends AbstractOperatorD
                         ctx.getJobletContext().getJobId(), new TaskId(getActivityId(), partition));
 
                 ITuplePartitionComputer probeHpc =
-                        new FieldHashPartitionComputerFamily(probeKeys, hashFunctionGeneratorFactories)
-                                .createPartitioner(0);
+                        new FieldHashPartitionComputerFamily(probeKeys, propHashFunctionFactories).createPartitioner(0);
                 ITuplePartitionComputer buildHpc =
-                        new FieldHashPartitionComputerFamily(buildKeys, hashFunctionGeneratorFactories)
+                        new FieldHashPartitionComputerFamily(buildKeys, buildHashFunctionFactories)
                                 .createPartitioner(0);
                 boolean isFailed = false;
 
@@ -483,10 +486,10 @@ public class OptimizedHybridHashJoinOperatorDescriptor extends AbstractOperatorD
                 private void joinPartitionPair(RunFileReader buildSideReader, RunFileReader probeSideReader,
                         int buildSizeInTuple, int probeSizeInTuple, int level) throws HyracksDataException {
                     ITuplePartitionComputer probeHpc =
-                            new FieldHashPartitionComputerFamily(probeKeys, hashFunctionGeneratorFactories)
+                            new FieldHashPartitionComputerFamily(probeKeys, propHashFunctionFactories)
                                     .createPartitioner(level);
                     ITuplePartitionComputer buildHpc =
-                            new FieldHashPartitionComputerFamily(buildKeys, hashFunctionGeneratorFactories)
+                            new FieldHashPartitionComputerFamily(buildKeys, buildHashFunctionFactories)
                                     .createPartitioner(level);
 
                     int frameSize = ctx.getInitialFrameSize();
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
index 94ea8c5..fb879b4 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
@@ -132,6 +132,7 @@ public class TPCHCustomerOrderHashJoinTest extends AbstractIntegrationTest {
         InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(spec, new int[] { 1 },
                 new int[] { 0 },
                 new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 custOrderJoinDesc, 128, null, 128);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
@@ -179,6 +180,7 @@ public class TPCHCustomerOrderHashJoinTest extends AbstractIntegrationTest {
         OptimizedHybridHashJoinOperatorDescriptor join = new OptimizedHybridHashJoinOperatorDescriptor(spec, 32, 20,
                 1.2, new int[] { 1 }, new int[] { 0 },
                 new IBinaryHashFunctionFamily[] { MurmurHash3BinaryHashFunctionFamily.INSTANCE },
+                new IBinaryHashFunctionFamily[] { MurmurHash3BinaryHashFunctionFamily.INSTANCE },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 custOrderJoinDesc,
@@ -236,6 +238,7 @@ public class TPCHCustomerOrderHashJoinTest extends AbstractIntegrationTest {
         InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(spec, new int[] { 0 },
                 new int[] { 1 },
                 new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 null, custOrderJoinDesc, true, nonMatchWriterFactories, 128, 128);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
@@ -288,6 +291,7 @@ public class TPCHCustomerOrderHashJoinTest extends AbstractIntegrationTest {
         OptimizedHybridHashJoinOperatorDescriptor join = new OptimizedHybridHashJoinOperatorDescriptor(spec, 32, 20,
                 1.2, new int[] { 0 }, new int[] { 1 },
                 new IBinaryHashFunctionFamily[] { MurmurHash3BinaryHashFunctionFamily.INSTANCE },
+                new IBinaryHashFunctionFamily[] { MurmurHash3BinaryHashFunctionFamily.INSTANCE },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 custOrderJoinDesc,
@@ -345,6 +349,7 @@ public class TPCHCustomerOrderHashJoinTest extends AbstractIntegrationTest {
         InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(spec, new int[] { 1 },
                 new int[] { 0 },
                 new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 custOrderJoinDesc, 128, null, 128);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
@@ -402,6 +407,7 @@ public class TPCHCustomerOrderHashJoinTest extends AbstractIntegrationTest {
         OptimizedHybridHashJoinOperatorDescriptor join = new OptimizedHybridHashJoinOperatorDescriptor(spec, 5, 20, 1.2,
                 new int[] { 1 }, new int[] { 0 },
                 new IBinaryHashFunctionFamily[] { MurmurHash3BinaryHashFunctionFamily.INSTANCE },
+                new IBinaryHashFunctionFamily[] { MurmurHash3BinaryHashFunctionFamily.INSTANCE },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 custOrderJoinDesc,
@@ -464,6 +470,7 @@ public class TPCHCustomerOrderHashJoinTest extends AbstractIntegrationTest {
         InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(spec, new int[] { 1 },
                 new int[] { 0 },
                 new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 custOrderJoinDesc, 128, null, 128);
         PartitionConstraintHelper.addPartitionCountConstraint(spec, join, 2);
@@ -527,6 +534,7 @@ public class TPCHCustomerOrderHashJoinTest extends AbstractIntegrationTest {
         InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(spec, new int[] { 1 },
                 new int[] { 0 },
                 new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 custOrderJoinDesc, 128, null, 128);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
@@ -583,6 +591,7 @@ public class TPCHCustomerOrderHashJoinTest extends AbstractIntegrationTest {
         OptimizedHybridHashJoinOperatorDescriptor join = new OptimizedHybridHashJoinOperatorDescriptor(spec, 15, 243,
                 1.2, new int[] { 0 }, new int[] { 1 },
                 new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE },
+                new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 custOrderJoinDesc,
@@ -634,6 +643,7 @@ public class TPCHCustomerOrderHashJoinTest extends AbstractIntegrationTest {
         OptimizedHybridHashJoinOperatorDescriptor join = new OptimizedHybridHashJoinOperatorDescriptor(spec, 15, 122,
                 1.2, new int[] { 0 }, new int[] { 1 },
                 new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE },
+                new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 custOrderJoinDesc,
@@ -686,6 +696,7 @@ public class TPCHCustomerOrderHashJoinTest extends AbstractIntegrationTest {
         OptimizedHybridHashJoinOperatorDescriptor join = new OptimizedHybridHashJoinOperatorDescriptor(spec, 6, 122,
                 1.2, new int[] { 0 }, new int[] { 1 },
                 new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE },
+                new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 custOrderJoinDesc,
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Join.java b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Join.java
index 7744974..8c30ba1 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Join.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Join.java
@@ -182,6 +182,8 @@ public class Join {
             join = new InMemoryHashJoinOperatorDescriptor(spec, new int[] { 0 }, new int[] { 1 },
                     new IBinaryHashFunctionFactory[] {
                             PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                    new IBinaryHashFunctionFactory[] {
+                            PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
                     new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                     Common.custOrderJoinDesc, tableSize, null, memSize * frameSize);
 
@@ -189,6 +191,7 @@ public class Join {
             join = new OptimizedHybridHashJoinOperatorDescriptor(spec, memSize, graceInputSize, graceFactor,
                     new int[] { 0 }, new int[] { 1 },
                     new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE },
+                    new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE },
                     new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                     new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                     Common.custOrderJoinDesc,