You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@datafu.apache.org by ey...@apache.org on 2022/10/20 04:53:07 UTC
[datafu] branch master updated: Update testing script and testing library versions
This is an automated email from the ASF dual-hosted git repository.
eyal pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/datafu.git
The following commit(s) were added to refs/heads/master by this push:
new 32257d8 Update testing script and testing library versions
32257d8 is described below
commit 32257d8333a79390a0db9ee52d87bc1d1c94da05
Author: Eyal Allweil <ey...@apache.org>
AuthorDate: Wed Oct 19 20:44:34 2022 +0300
Update testing script and testing library versions
---
datafu-spark/build.gradle | 12 ++++++++----
datafu-spark/build_and_test_spark.sh | 4 ++--
.../test/scala/datafu/spark/PySparkLibTestResources.scala | 8 ++------
gradle/dependency-versions.gradle | 5 ++---
4 files changed, 14 insertions(+), 15 deletions(-)
diff --git a/datafu-spark/build.gradle b/datafu-spark/build.gradle
index 6b86920..4d0424b 100644
--- a/datafu-spark/build.gradle
+++ b/datafu-spark/build.gradle
@@ -62,8 +62,13 @@ dependencies {
testCompile "org.apache.logging.log4j:log4j-1.2-api:$log4j2Version"
testCompile "org.apache.logging.log4j:log4j-slf4j-impl:$log4j2Version"
- testCompile "com.holdenkarau:spark-testing-base_" + scalaCompatVersion + ":" + sparkVersion + "_" + sparkTestingBaseVersion
- testCompile "org.scalatest:scalatest_" + scalaCompatVersion + ":" + scalaTestVersion
+
+// there isn't a spark-testing-base version for 2.3.4, but 2.3.3 will work instead
+ if (sparkVersion == "2.3.4") {
+ testCompile "com.holdenkarau:spark-testing-base_" + scalaCompatVersion + ":2.3.3_" + sparkTestingBaseVersion
+ } else {
+ testCompile "com.holdenkarau:spark-testing-base_" + scalaCompatVersion + ":" + sparkVersion + "_" + sparkTestingBaseVersion
+ }
}
// we need to set up the build for hadoop 3
@@ -115,7 +120,6 @@ zipPySpark.onlyIf {
// download py4j for testing. This is not shipped with datafu-spark.
project.ext.py4js = [
- "py4j-0.10.4-src.zip" : "https://files.pythonhosted.org/packages/93/a7/0e1719e8ad34d194aae72dc07a37e65fd3895db7c797a67a828333cd6067/py4j-0.10.4-py2.py3-none-any.whl",
"py4j-0.10.6-src.zip" : "https://files.pythonhosted.org/packages/4a/08/162710786239aa72bd72bb46c64f2b02f54250412ba928cb373b30699139/py4j-0.10.6-py2.py3-none-any.whl",
"py4j-0.10.7-src.zip" : "https://files.pythonhosted.org/packages/e3/53/c737818eb9a7dc32a7cd4f1396e787bd94200c3997c72c1dbe028587bd76/py4j-0.10.7-py2.py3-none-any.whl",
"py4j-0.10.8.1-src.zip" : "https://files.pythonhosted.org/packages/04/de/2d314a921ef4c20b283e1de94e0780273678caac901564df06b948e4ba9b/py4j-0.10.8.1-py2.py3-none-any.whl"
@@ -156,4 +160,4 @@ configurations.all {
exclude group: 'log4j', module:'log4j'
exclude group: 'log4j', module:'apache-log4j-extras'
exclude group: 'org.slf4j', module:'slf4j-log4j12'
-}
\ No newline at end of file
+}
diff --git a/datafu-spark/build_and_test_spark.sh b/datafu-spark/build_and_test_spark.sh
index 744ecd3..51a0b5b 100755
--- a/datafu-spark/build_and_test_spark.sh
+++ b/datafu-spark/build_and_test_spark.sh
@@ -36,9 +36,9 @@ function build {
echo "----- Building versions for Scala $scala, Spark $spark ----"
if ./gradlew :datafu-spark:clean; then
echo "----- Clean for Scala $scala, spark $spark succeeded"
- if ./gradlew :datafu-spark:assemble -PscalaVersion=$scala -PsparkVersion=$spark; then
+ if ./gradlew :datafu-spark:assemble -PscalaVersion=$scala -PsparkVersion=$spark -PscalaCompatVersion=$scala; then
echo "----- Build for Scala $scala, spark $spark succeeded"
- if ./gradlew :datafu-spark:test -PscalaVersion=$scala -PsparkVersion=$spark $TEST_PARAMS; then
+ if ./gradlew :datafu-spark:test -PscalaVersion=$scala -PsparkVersion=$spark -PscalaCompatVersion=$scala $TEST_PARAMS; then
log "Testing for Scala $scala, spark $spark succeeded"
if [[ $JARS_DIR != "NONE" ]]; then
cp datafu-spark/build/libs/*.jar $JARS_DIR/
diff --git a/datafu-spark/src/test/scala/datafu/spark/PySparkLibTestResources.scala b/datafu-spark/src/test/scala/datafu/spark/PySparkLibTestResources.scala
index 5086295..ee275c9 100644
--- a/datafu-spark/src/test/scala/datafu/spark/PySparkLibTestResources.scala
+++ b/datafu-spark/src/test/scala/datafu/spark/PySparkLibTestResources.scala
@@ -29,10 +29,6 @@ object PathsResolver {
val sparkSystemVersion = System.getProperty("datafu.spark.version")
val py4js = Map(
- "2.1.0" -> "0.10.4",
- "2.1.1" -> "0.10.4",
- "2.1.2" -> "0.10.4",
- "2.1.3" -> "0.10.4",
"2.2.0" -> "0.10.7",
"2.2.1" -> "0.10.7",
"2.2.2" -> "0.10.7",
@@ -45,9 +41,9 @@ object PathsResolver {
"2.4.3" -> "0.10.8.1"
)
- val sparkVersion = if (sparkSystemVersion == null) "2.3.0" else sparkSystemVersion
+ val sparkVersion = if (sparkSystemVersion == null) "2.4.3" else sparkSystemVersion
- val py4jVersion = py4js.getOrElse(sparkVersion, "0.10.6") // our default
+ val py4jVersion = py4js.getOrElse(sparkVersion, "0.10.8.1") // our default
val pyspark = ResourceCloning.cloneResource(new File("data/pysparks/pyspark-" + sparkVersion + ".zip").toURI().toURL(),
"pyspark_cloned.zip").getPath
diff --git a/gradle/dependency-versions.gradle b/gradle/dependency-versions.gradle
index 091b37d..f48bd47 100644
--- a/gradle/dependency-versions.gradle
+++ b/gradle/dependency-versions.gradle
@@ -31,7 +31,7 @@ ext {
mavenVersion="2.1.3"
jlineVersion="0.9.94"
pigVersion="0.14.0"
- testngVersion="6.2"
+ testngVersion="7.5"
toolsVersion="1.4.2"
wagonHttpVersion="1.0-beta-2"
openNlpVersion="1.5.3"
@@ -39,6 +39,5 @@ ext {
jsonVersion="1.1.1"
jsr311Version="1.1.1"
slf4jVersion="1.6.4"
- sparkTestingBaseVersion = "0.12.0"
- scalaTestVersion="2.3.0"
+ sparkTestingBaseVersion = "0.14.0"
}