You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by do...@apache.org on 2019/11/03 19:21:53 UTC
[spark] branch master updated: [SPARK-29733][TESTS] Fix wrong order
of parameters passed to `assertEquals`
This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 80a8987 [SPARK-29733][TESTS] Fix wrong order of parameters passed to `assertEquals`
80a8987 is described below
commit 80a89873b20aa07e2522bed5da0fc50e616246d9
Author: Maxim Gekk <ma...@gmail.com>
AuthorDate: Sun Nov 3 11:21:28 2019 -0800
[SPARK-29733][TESTS] Fix wrong order of parameters passed to `assertEquals`
### What changes were proposed in this pull request?
The `assertEquals` method of JUnit Assert requires the first parameter to be the expected value. In this PR, I propose to change the order of parameters when the expected value is passed as the second parameter.
### Why are the changes needed?
Wrong ordering of assert parameters is confusing when an assert fails and the parameters have a special string representation. For example:
```java
assertEquals(input1.add(input2), new CalendarInterval(5, 5, 367200000000L));
```
```
java.lang.AssertionError:
Expected :interval 5 months 5 days 101 hours
Actual :interval 5 months 5 days 102 hours
```
### Does this PR introduce any user-facing change?
No
### How was this patch tested?
By existing tests.
Closes #26377 from MaxGekk/fix-order-in-assert-equals.
Authored-by: Maxim Gekk <ma...@gmail.com>
Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
.../apache/spark/network/RpcIntegrationSuite.java | 6 +-
.../org/apache/spark/unsafe/PlatformUtilSuite.java | 20 ++--
.../spark/unsafe/types/CalendarIntervalSuite.java | 8 +-
.../apache/spark/unsafe/types/UTF8StringSuite.java | 126 ++++++++++-----------
.../apache/spark/launcher/SparkLauncherSuite.java | 4 +-
.../spark/util/SerializableConfigurationSuite.java | 2 +-
.../unsafe/sort/UnsafeExternalSorterSuite.java | 4 +-
.../spark/launcher/ChildProcAppHandleSuite.java | 8 +-
.../JavaLogisticRegressionSuite.java | 6 +-
.../ml/classification/JavaOneVsRestSuite.java | 8 +-
.../spark/ml/feature/JavaHashingTFSuite.java | 2 +-
.../spark/ml/feature/JavaVectorIndexerSuite.java | 4 +-
.../spark/ml/feature/JavaVectorSlicerSuite.java | 2 +-
.../apache/spark/ml/feature/JavaWord2VecSuite.java | 2 +-
.../org/apache/spark/ml/param/JavaParamsSuite.java | 6 +-
.../mllib/clustering/JavaGaussianMixtureSuite.java | 2 +-
.../expressions/RowBasedKeyValueBatchSuite.java | 8 +-
.../spark/sql/JavaBeanDeserializationSuite.java | 4 +-
.../org/apache/spark/sql/JavaDataFrameSuite.java | 4 +-
.../org/apache/spark/sql/JavaDatasetSuite.java | 10 +-
.../spark/streaming/JavaWriteAheadLogSuite.java | 2 +-
21 files changed, 119 insertions(+), 119 deletions(-)
diff --git a/common/network-common/src/test/java/org/apache/spark/network/RpcIntegrationSuite.java b/common/network-common/src/test/java/org/apache/spark/network/RpcIntegrationSuite.java
index 498dc51..916c140 100644
--- a/common/network-common/src/test/java/org/apache/spark/network/RpcIntegrationSuite.java
+++ b/common/network-common/src/test/java/org/apache/spark/network/RpcIntegrationSuite.java
@@ -260,14 +260,14 @@ public class RpcIntegrationSuite {
@Test
public void singleRPC() throws Exception {
RpcResult res = sendRPC("hello/Aaron");
- assertEquals(res.successMessages, Sets.newHashSet("Hello, Aaron!"));
+ assertEquals(Sets.newHashSet("Hello, Aaron!"), res.successMessages);
assertTrue(res.errorMessages.isEmpty());
}
@Test
public void doubleRPC() throws Exception {
RpcResult res = sendRPC("hello/Aaron", "hello/Reynold");
- assertEquals(res.successMessages, Sets.newHashSet("Hello, Aaron!", "Hello, Reynold!"));
+ assertEquals(Sets.newHashSet("Hello, Aaron!", "Hello, Reynold!"), res.successMessages);
assertTrue(res.errorMessages.isEmpty());
}
@@ -295,7 +295,7 @@ public class RpcIntegrationSuite {
@Test
public void sendSuccessAndFailure() throws Exception {
RpcResult res = sendRPC("hello/Bob", "throw error/the", "hello/Builder", "return error/!");
- assertEquals(res.successMessages, Sets.newHashSet("Hello, Bob!", "Hello, Builder!"));
+ assertEquals(Sets.newHashSet("Hello, Bob!", "Hello, Builder!"), res.successMessages);
assertErrorsContain(res.errorMessages, Sets.newHashSet("Thrown: the", "Returned: !"));
}
diff --git a/common/unsafe/src/test/java/org/apache/spark/unsafe/PlatformUtilSuite.java b/common/unsafe/src/test/java/org/apache/spark/unsafe/PlatformUtilSuite.java
index 3ad9ac7..19e4182 100644
--- a/common/unsafe/src/test/java/org/apache/spark/unsafe/PlatformUtilSuite.java
+++ b/common/unsafe/src/test/java/org/apache/spark/unsafe/PlatformUtilSuite.java
@@ -114,25 +114,25 @@ public class PlatformUtilSuite {
Assert.assertTrue(MemoryAllocator.MEMORY_DEBUG_FILL_ENABLED);
MemoryBlock onheap = MemoryAllocator.HEAP.allocate(1);
Assert.assertEquals(
- Platform.getByte(onheap.getBaseObject(), onheap.getBaseOffset()),
- MemoryAllocator.MEMORY_DEBUG_FILL_CLEAN_VALUE);
+ MemoryAllocator.MEMORY_DEBUG_FILL_CLEAN_VALUE,
+ Platform.getByte(onheap.getBaseObject(), onheap.getBaseOffset()));
MemoryBlock onheap1 = MemoryAllocator.HEAP.allocate(1024 * 1024);
Object onheap1BaseObject = onheap1.getBaseObject();
long onheap1BaseOffset = onheap1.getBaseOffset();
MemoryAllocator.HEAP.free(onheap1);
Assert.assertEquals(
- Platform.getByte(onheap1BaseObject, onheap1BaseOffset),
- MemoryAllocator.MEMORY_DEBUG_FILL_FREED_VALUE);
+ MemoryAllocator.MEMORY_DEBUG_FILL_FREED_VALUE,
+ Platform.getByte(onheap1BaseObject, onheap1BaseOffset));
MemoryBlock onheap2 = MemoryAllocator.HEAP.allocate(1024 * 1024);
Assert.assertEquals(
- Platform.getByte(onheap2.getBaseObject(), onheap2.getBaseOffset()),
- MemoryAllocator.MEMORY_DEBUG_FILL_CLEAN_VALUE);
+ MemoryAllocator.MEMORY_DEBUG_FILL_CLEAN_VALUE,
+ Platform.getByte(onheap2.getBaseObject(), onheap2.getBaseOffset()));
MemoryBlock offheap = MemoryAllocator.UNSAFE.allocate(1);
Assert.assertEquals(
- Platform.getByte(offheap.getBaseObject(), offheap.getBaseOffset()),
- MemoryAllocator.MEMORY_DEBUG_FILL_CLEAN_VALUE);
+ MemoryAllocator.MEMORY_DEBUG_FILL_CLEAN_VALUE,
+ Platform.getByte(offheap.getBaseObject(), offheap.getBaseOffset()));
MemoryAllocator.UNSAFE.free(offheap);
}
@@ -150,11 +150,11 @@ public class PlatformUtilSuite {
// The size is greater than `HeapMemoryAllocator.POOLING_THRESHOLD_BYTES`,
// reuse the previous memory which has released.
MemoryBlock onheap3 = heapMem.allocate(1024 * 1024 + 1);
- Assert.assertEquals(onheap3.size(), 1024 * 1024 + 1);
+ Assert.assertEquals(1024 * 1024 + 1, onheap3.size());
Object obj3 = onheap3.getBaseObject();
heapMem.free(onheap3);
MemoryBlock onheap4 = heapMem.allocate(1024 * 1024 + 7);
- Assert.assertEquals(onheap4.size(), 1024 * 1024 + 7);
+ Assert.assertEquals(1024 * 1024 + 7, onheap4.size());
Assert.assertEquals(obj3, onheap4.getBaseObject());
}
}
diff --git a/common/unsafe/src/test/java/org/apache/spark/unsafe/types/CalendarIntervalSuite.java b/common/unsafe/src/test/java/org/apache/spark/unsafe/types/CalendarIntervalSuite.java
index c3ed57c..df45b98 100644
--- a/common/unsafe/src/test/java/org/apache/spark/unsafe/types/CalendarIntervalSuite.java
+++ b/common/unsafe/src/test/java/org/apache/spark/unsafe/types/CalendarIntervalSuite.java
@@ -77,21 +77,21 @@ public class CalendarIntervalSuite {
public void addTest() {
CalendarInterval input1 = new CalendarInterval(3, 1, 1 * MICROS_PER_HOUR);
CalendarInterval input2 = new CalendarInterval(2, 4, 100 * MICROS_PER_HOUR);
- assertEquals(input1.add(input2), new CalendarInterval(5, 5, 101 * MICROS_PER_HOUR));
+ assertEquals(new CalendarInterval(5, 5, 101 * MICROS_PER_HOUR), input1.add(input2));
input1 = new CalendarInterval(-10, -30, -81 * MICROS_PER_HOUR);
input2 = new CalendarInterval(75, 150, 200 * MICROS_PER_HOUR);
- assertEquals(input1.add(input2), new CalendarInterval(65, 120, 119 * MICROS_PER_HOUR));
+ assertEquals(new CalendarInterval(65, 120, 119 * MICROS_PER_HOUR), input1.add(input2));
}
@Test
public void subtractTest() {
CalendarInterval input1 = new CalendarInterval(3, 1, 1 * MICROS_PER_HOUR);
CalendarInterval input2 = new CalendarInterval(2, 4, 100 * MICROS_PER_HOUR);
- assertEquals(input1.subtract(input2), new CalendarInterval(1, -3, -99 * MICROS_PER_HOUR));
+ assertEquals(new CalendarInterval(1, -3, -99 * MICROS_PER_HOUR), input1.subtract(input2));
input1 = new CalendarInterval(-10, -30, -81 * MICROS_PER_HOUR);
input2 = new CalendarInterval(75, 150, 200 * MICROS_PER_HOUR);
- assertEquals(input1.subtract(input2), new CalendarInterval(-85, -180, -281 * MICROS_PER_HOUR));
+ assertEquals(new CalendarInterval(-85, -180, -281 * MICROS_PER_HOUR), input1.subtract(input2));
}
}
diff --git a/common/unsafe/src/test/java/org/apache/spark/unsafe/types/UTF8StringSuite.java b/common/unsafe/src/test/java/org/apache/spark/unsafe/types/UTF8StringSuite.java
index cd253c0..dbede9b 100644
--- a/common/unsafe/src/test/java/org/apache/spark/unsafe/types/UTF8StringSuite.java
+++ b/common/unsafe/src/test/java/org/apache/spark/unsafe/types/UTF8StringSuite.java
@@ -38,11 +38,11 @@ public class UTF8StringSuite {
private static void checkBasic(String str, int len) {
UTF8String s1 = fromString(str);
UTF8String s2 = fromBytes(str.getBytes(StandardCharsets.UTF_8));
- assertEquals(s1.numChars(), len);
- assertEquals(s2.numChars(), len);
+ assertEquals(len, s1.numChars());
+ assertEquals(len, s2.numChars());
- assertEquals(s1.toString(), str);
- assertEquals(s2.toString(), str);
+ assertEquals(str, s1.toString());
+ assertEquals(str, s2.toString());
assertEquals(s1, s2);
assertEquals(s1.hashCode(), s2.hashCode());
@@ -375,20 +375,20 @@ public class UTF8StringSuite {
@Test
public void substringSQL() {
UTF8String e = fromString("example");
- assertEquals(e.substringSQL(0, 2), fromString("ex"));
- assertEquals(e.substringSQL(1, 2), fromString("ex"));
- assertEquals(e.substringSQL(0, 7), fromString("example"));
- assertEquals(e.substringSQL(1, 2), fromString("ex"));
- assertEquals(e.substringSQL(0, 100), fromString("example"));
- assertEquals(e.substringSQL(1, 100), fromString("example"));
- assertEquals(e.substringSQL(2, 2), fromString("xa"));
- assertEquals(e.substringSQL(1, 6), fromString("exampl"));
- assertEquals(e.substringSQL(2, 100), fromString("xample"));
- assertEquals(e.substringSQL(0, 0), fromString(""));
- assertEquals(e.substringSQL(100, 4), EMPTY_UTF8);
- assertEquals(e.substringSQL(0, Integer.MAX_VALUE), fromString("example"));
- assertEquals(e.substringSQL(1, Integer.MAX_VALUE), fromString("example"));
- assertEquals(e.substringSQL(2, Integer.MAX_VALUE), fromString("xample"));
+ assertEquals(fromString("ex"), e.substringSQL(0, 2));
+ assertEquals(fromString("ex"), e.substringSQL(1, 2));
+ assertEquals(fromString("example"), e.substringSQL(0, 7));
+ assertEquals(fromString("ex"), e.substringSQL(1, 2));
+ assertEquals(fromString("example"), e.substringSQL(0, 100));
+ assertEquals(fromString("example"), e.substringSQL(1, 100));
+ assertEquals(fromString("xa"), e.substringSQL(2, 2));
+ assertEquals(fromString("exampl"), e.substringSQL(1, 6));
+ assertEquals(fromString("xample"), e.substringSQL(2, 100));
+ assertEquals(fromString(""), e.substringSQL(0, 0));
+ assertEquals(EMPTY_UTF8, e.substringSQL(100, 4));
+ assertEquals(fromString("example"), e.substringSQL(0, Integer.MAX_VALUE));
+ assertEquals(fromString("example"), e.substringSQL(1, Integer.MAX_VALUE));
+ assertEquals(fromString("xample"), e.substringSQL(2, Integer.MAX_VALUE));
}
@Test
@@ -506,50 +506,50 @@ public class UTF8StringSuite {
@Test
public void soundex() {
- assertEquals(fromString("Robert").soundex(), fromString("R163"));
- assertEquals(fromString("Rupert").soundex(), fromString("R163"));
- assertEquals(fromString("Rubin").soundex(), fromString("R150"));
- assertEquals(fromString("Ashcraft").soundex(), fromString("A261"));
- assertEquals(fromString("Ashcroft").soundex(), fromString("A261"));
- assertEquals(fromString("Burroughs").soundex(), fromString("B620"));
- assertEquals(fromString("Burrows").soundex(), fromString("B620"));
- assertEquals(fromString("Ekzampul").soundex(), fromString("E251"));
- assertEquals(fromString("Example").soundex(), fromString("E251"));
- assertEquals(fromString("Ellery").soundex(), fromString("E460"));
- assertEquals(fromString("Euler").soundex(), fromString("E460"));
- assertEquals(fromString("Ghosh").soundex(), fromString("G200"));
- assertEquals(fromString("Gauss").soundex(), fromString("G200"));
- assertEquals(fromString("Gutierrez").soundex(), fromString("G362"));
- assertEquals(fromString("Heilbronn").soundex(), fromString("H416"));
- assertEquals(fromString("Hilbert").soundex(), fromString("H416"));
- assertEquals(fromString("Jackson").soundex(), fromString("J250"));
- assertEquals(fromString("Kant").soundex(), fromString("K530"));
- assertEquals(fromString("Knuth").soundex(), fromString("K530"));
- assertEquals(fromString("Lee").soundex(), fromString("L000"));
- assertEquals(fromString("Lukasiewicz").soundex(), fromString("L222"));
- assertEquals(fromString("Lissajous").soundex(), fromString("L222"));
- assertEquals(fromString("Ladd").soundex(), fromString("L300"));
- assertEquals(fromString("Lloyd").soundex(), fromString("L300"));
- assertEquals(fromString("Moses").soundex(), fromString("M220"));
- assertEquals(fromString("O'Hara").soundex(), fromString("O600"));
- assertEquals(fromString("Pfister").soundex(), fromString("P236"));
- assertEquals(fromString("Rubin").soundex(), fromString("R150"));
- assertEquals(fromString("Robert").soundex(), fromString("R163"));
- assertEquals(fromString("Rupert").soundex(), fromString("R163"));
- assertEquals(fromString("Soundex").soundex(), fromString("S532"));
- assertEquals(fromString("Sownteks").soundex(), fromString("S532"));
- assertEquals(fromString("Tymczak").soundex(), fromString("T522"));
- assertEquals(fromString("VanDeusen").soundex(), fromString("V532"));
- assertEquals(fromString("Washington").soundex(), fromString("W252"));
- assertEquals(fromString("Wheaton").soundex(), fromString("W350"));
-
- assertEquals(fromString("a").soundex(), fromString("A000"));
- assertEquals(fromString("ab").soundex(), fromString("A100"));
- assertEquals(fromString("abc").soundex(), fromString("A120"));
- assertEquals(fromString("abcd").soundex(), fromString("A123"));
- assertEquals(fromString("").soundex(), fromString(""));
- assertEquals(fromString("123").soundex(), fromString("123"));
- assertEquals(fromString("世界千世").soundex(), fromString("世界千世"));
+ assertEquals(fromString("R163"), fromString("Robert").soundex());
+ assertEquals(fromString("R163"), fromString("Rupert").soundex());
+ assertEquals(fromString("R150"), fromString("Rubin").soundex());
+ assertEquals(fromString("A261"), fromString("Ashcraft").soundex());
+ assertEquals(fromString("A261"), fromString("Ashcroft").soundex());
+ assertEquals(fromString("B620"), fromString("Burroughs").soundex());
+ assertEquals(fromString("B620"), fromString("Burrows").soundex());
+ assertEquals(fromString("E251"), fromString("Ekzampul").soundex());
+ assertEquals(fromString("E251"), fromString("Example").soundex());
+ assertEquals(fromString("E460"), fromString("Ellery").soundex());
+ assertEquals(fromString("E460"), fromString("Euler").soundex());
+ assertEquals(fromString("G200"), fromString("Ghosh").soundex());
+ assertEquals(fromString("G200"), fromString("Gauss").soundex());
+ assertEquals(fromString("G362"), fromString("Gutierrez").soundex());
+ assertEquals(fromString("H416"), fromString("Heilbronn").soundex());
+ assertEquals(fromString("H416"), fromString("Hilbert").soundex());
+ assertEquals(fromString("J250"), fromString("Jackson").soundex());
+ assertEquals(fromString("K530"), fromString("Kant").soundex());
+ assertEquals(fromString("K530"), fromString("Knuth").soundex());
+ assertEquals(fromString("L000"), fromString("Lee").soundex());
+ assertEquals(fromString("L222"), fromString("Lukasiewicz").soundex());
+ assertEquals(fromString("L222"), fromString("Lissajous").soundex());
+ assertEquals(fromString("L300"), fromString("Ladd").soundex());
+ assertEquals(fromString("L300"), fromString("Lloyd").soundex());
+ assertEquals(fromString("M220"), fromString("Moses").soundex());
+ assertEquals(fromString("O600"), fromString("O'Hara").soundex());
+ assertEquals(fromString("P236"), fromString("Pfister").soundex());
+ assertEquals(fromString("R150"), fromString("Rubin").soundex());
+ assertEquals(fromString("R163"), fromString("Robert").soundex());
+ assertEquals(fromString("R163"), fromString("Rupert").soundex());
+ assertEquals(fromString("S532"), fromString("Soundex").soundex());
+ assertEquals(fromString("S532"), fromString("Sownteks").soundex());
+ assertEquals(fromString("T522"), fromString("Tymczak").soundex());
+ assertEquals(fromString("V532"), fromString("VanDeusen").soundex());
+ assertEquals(fromString("W252"), fromString("Washington").soundex());
+ assertEquals(fromString("W350"), fromString("Wheaton").soundex());
+
+ assertEquals(fromString("A000"), fromString("a").soundex());
+ assertEquals(fromString("A100"), fromString("ab").soundex());
+ assertEquals(fromString("A120"), fromString("abc").soundex());
+ assertEquals(fromString("A123"), fromString("abcd").soundex());
+ assertEquals(fromString(""), fromString("").soundex());
+ assertEquals(fromString("123"), fromString("123").soundex());
+ assertEquals(fromString("世界千世"), fromString("世界千世").soundex());
}
@Test
@@ -849,7 +849,7 @@ public class UTF8StringSuite {
for (int i = 0; i < wrongFirstBytes.length; ++i) {
c[0] = (byte)wrongFirstBytes[i];
- assertEquals(fromBytes(c).numChars(), 1);
+ assertEquals(1, fromBytes(c).numChars());
}
}
}
diff --git a/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java b/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
index 773c390..fb85238 100644
--- a/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
+++ b/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
@@ -323,7 +323,7 @@ public class SparkLauncherSuite extends BaseSuite {
public static void main(String[] args) throws Exception {
assertNotEquals(0, args.length);
- assertEquals(args[0], "hello");
+ assertEquals("hello", args[0]);
new SparkContext().stop();
synchronized (LOCK) {
@@ -340,7 +340,7 @@ public class SparkLauncherSuite extends BaseSuite {
public static void main(String[] args) {
assertNotEquals(0, args.length);
- assertEquals(args[0], "hello");
+ assertEquals("hello", args[0]);
throw DUMMY_EXCEPTION;
}
}
diff --git a/core/src/test/java/org/apache/spark/util/SerializableConfigurationSuite.java b/core/src/test/java/org/apache/spark/util/SerializableConfigurationSuite.java
index 0944d68..28d038a 100644
--- a/core/src/test/java/org/apache/spark/util/SerializableConfigurationSuite.java
+++ b/core/src/test/java/org/apache/spark/util/SerializableConfigurationSuite.java
@@ -50,6 +50,6 @@ public class SerializableConfigurationSuite {
hadoopConfiguration.set("test.property", "value");
SerializableConfiguration scs = new SerializableConfiguration(hadoopConfiguration);
SerializableConfiguration actual = rdd.map(val -> scs).collect().get(0);
- assertEquals(actual.value().get("test.property"), "value");
+ assertEquals("value", actual.value().get("test.property"));
}
}
diff --git a/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java b/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java
index d5b1a1c..4397771 100644
--- a/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java
+++ b/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java
@@ -222,7 +222,7 @@ public class UnsafeExternalSorterSuite {
public void testSortTimeMetric() throws Exception {
final UnsafeExternalSorter sorter = newSorter();
long prevSortTime = sorter.getSortTimeNanos();
- assertEquals(prevSortTime, 0);
+ assertEquals(0, prevSortTime);
sorter.insertRecord(null, 0, 0, 0, false);
sorter.spill();
@@ -230,7 +230,7 @@ public class UnsafeExternalSorterSuite {
prevSortTime = sorter.getSortTimeNanos();
sorter.spill(); // no sort needed
- assertEquals(sorter.getSortTimeNanos(), prevSortTime);
+ assertEquals(prevSortTime, sorter.getSortTimeNanos());
sorter.insertRecord(null, 0, 0, 0, false);
UnsafeSorterIterator iter = sorter.getSortedIterator();
diff --git a/launcher/src/test/java/org/apache/spark/launcher/ChildProcAppHandleSuite.java b/launcher/src/test/java/org/apache/spark/launcher/ChildProcAppHandleSuite.java
index fe44efd2..d1b350f 100644
--- a/launcher/src/test/java/org/apache/spark/launcher/ChildProcAppHandleSuite.java
+++ b/launcher/src/test/java/org/apache/spark/launcher/ChildProcAppHandleSuite.java
@@ -77,11 +77,11 @@ public class ChildProcAppHandleSuite extends BaseSuite {
SparkLauncher launcher = new SparkLauncher();
launcher.redirectError(ProcessBuilder.Redirect.PIPE);
assertNotNull(launcher.errorStream);
- assertEquals(launcher.errorStream.type(), ProcessBuilder.Redirect.Type.PIPE);
+ assertEquals(ProcessBuilder.Redirect.Type.PIPE, launcher.errorStream.type());
launcher.redirectOutput(ProcessBuilder.Redirect.PIPE);
assertNotNull(launcher.outputStream);
- assertEquals(launcher.outputStream.type(), ProcessBuilder.Redirect.Type.PIPE);
+ assertEquals(ProcessBuilder.Redirect.Type.PIPE, launcher.outputStream.type());
}
@Test
@@ -89,11 +89,11 @@ public class ChildProcAppHandleSuite extends BaseSuite {
SparkLauncher launcher = new SparkLauncher();
launcher.redirectError(ProcessBuilder.Redirect.PIPE)
.redirectError(ProcessBuilder.Redirect.INHERIT);
- assertEquals(launcher.errorStream.type(), ProcessBuilder.Redirect.Type.INHERIT);
+ assertEquals(ProcessBuilder.Redirect.Type.INHERIT, launcher.errorStream.type());
launcher.redirectOutput(ProcessBuilder.Redirect.PIPE)
.redirectOutput(ProcessBuilder.Redirect.INHERIT);
- assertEquals(launcher.outputStream.type(), ProcessBuilder.Redirect.Type.INHERIT);
+ assertEquals(ProcessBuilder.Redirect.Type.INHERIT, launcher.outputStream.type());
}
@Test
diff --git a/mllib/src/test/java/org/apache/spark/ml/classification/JavaLogisticRegressionSuite.java b/mllib/src/test/java/org/apache/spark/ml/classification/JavaLogisticRegressionSuite.java
index 0041021..49ac493 100644
--- a/mllib/src/test/java/org/apache/spark/ml/classification/JavaLogisticRegressionSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/classification/JavaLogisticRegressionSuite.java
@@ -50,7 +50,7 @@ public class JavaLogisticRegressionSuite extends SharedSparkSession {
@Test
public void logisticRegressionDefaultParams() {
LogisticRegression lr = new LogisticRegression();
- Assert.assertEquals(lr.getLabelCol(), "label");
+ Assert.assertEquals("label", lr.getLabelCol());
LogisticRegressionModel model = lr.fit(dataset);
model.transform(dataset).createOrReplaceTempView("prediction");
Dataset<Row> predictions = spark.sql("SELECT label, probability, prediction FROM prediction");
@@ -119,8 +119,8 @@ public class JavaLogisticRegressionSuite extends SharedSparkSession {
for (Row row : trans1.collectAsList()) {
Vector raw = (Vector) row.get(0);
Vector prob = (Vector) row.get(1);
- Assert.assertEquals(raw.size(), 2);
- Assert.assertEquals(prob.size(), 2);
+ Assert.assertEquals(2, raw.size());
+ Assert.assertEquals(2, prob.size());
double probFromRaw1 = 1.0 / (1.0 + Math.exp(-raw.apply(1)));
Assert.assertEquals(0, Math.abs(prob.apply(1) - probFromRaw1), eps);
Assert.assertEquals(0, Math.abs(prob.apply(0) - (1.0 - probFromRaw1)), eps);
diff --git a/mllib/src/test/java/org/apache/spark/ml/classification/JavaOneVsRestSuite.java b/mllib/src/test/java/org/apache/spark/ml/classification/JavaOneVsRestSuite.java
index 6194167..62888b8 100644
--- a/mllib/src/test/java/org/apache/spark/ml/classification/JavaOneVsRestSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/classification/JavaOneVsRestSuite.java
@@ -62,12 +62,12 @@ public class JavaOneVsRestSuite extends SharedSparkSession {
public void oneVsRestDefaultParams() {
OneVsRest ova = new OneVsRest();
ova.setClassifier(new LogisticRegression());
- Assert.assertEquals(ova.getLabelCol(), "label");
- Assert.assertEquals(ova.getPredictionCol(), "prediction");
+ Assert.assertEquals("label", ova.getLabelCol());
+ Assert.assertEquals("prediction", ova.getPredictionCol());
OneVsRestModel ovaModel = ova.fit(dataset);
Dataset<Row> predictions = ovaModel.transform(dataset).select("label", "prediction");
predictions.collectAsList();
- Assert.assertEquals(ovaModel.getLabelCol(), "label");
- Assert.assertEquals(ovaModel.getPredictionCol(), "prediction");
+ Assert.assertEquals("label", ovaModel.getLabelCol());
+ Assert.assertEquals("prediction", ovaModel.getPredictionCol());
}
}
diff --git a/mllib/src/test/java/org/apache/spark/ml/feature/JavaHashingTFSuite.java b/mllib/src/test/java/org/apache/spark/ml/feature/JavaHashingTFSuite.java
index 57696d0..71c6445 100644
--- a/mllib/src/test/java/org/apache/spark/ml/feature/JavaHashingTFSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/feature/JavaHashingTFSuite.java
@@ -64,7 +64,7 @@ public class JavaHashingTFSuite extends SharedSparkSession {
Dataset<Row> rescaledData = idfModel.transform(featurizedData);
for (Row r : rescaledData.select("features", "label").takeAsList(3)) {
Vector features = r.getAs(0);
- Assert.assertEquals(features.size(), numFeatures);
+ Assert.assertEquals(numFeatures, features.size());
}
}
}
diff --git a/mllib/src/test/java/org/apache/spark/ml/feature/JavaVectorIndexerSuite.java b/mllib/src/test/java/org/apache/spark/ml/feature/JavaVectorIndexerSuite.java
index ca8fae3..cf5308b 100644
--- a/mllib/src/test/java/org/apache/spark/ml/feature/JavaVectorIndexerSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/feature/JavaVectorIndexerSuite.java
@@ -47,9 +47,9 @@ public class JavaVectorIndexerSuite extends SharedSparkSession {
.setOutputCol("indexed")
.setMaxCategories(2);
VectorIndexerModel model = indexer.fit(data);
- Assert.assertEquals(model.numFeatures(), 2);
+ Assert.assertEquals(2, model.numFeatures());
Map<Integer, Map<Double, Integer>> categoryMaps = model.javaCategoryMaps();
- Assert.assertEquals(categoryMaps.size(), 1);
+ Assert.assertEquals(1, categoryMaps.size());
Dataset<Row> indexedData = model.transform(data);
}
}
diff --git a/mllib/src/test/java/org/apache/spark/ml/feature/JavaVectorSlicerSuite.java b/mllib/src/test/java/org/apache/spark/ml/feature/JavaVectorSlicerSuite.java
index 3dc2e1f..b9bca9d 100644
--- a/mllib/src/test/java/org/apache/spark/ml/feature/JavaVectorSlicerSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/feature/JavaVectorSlicerSuite.java
@@ -63,7 +63,7 @@ public class JavaVectorSlicerSuite extends SharedSparkSession {
for (Row r : output.select("userFeatures", "features").takeAsList(2)) {
Vector features = r.getAs(1);
- Assert.assertEquals(features.size(), 2);
+ Assert.assertEquals(2, features.size());
}
}
}
diff --git a/mllib/src/test/java/org/apache/spark/ml/feature/JavaWord2VecSuite.java b/mllib/src/test/java/org/apache/spark/ml/feature/JavaWord2VecSuite.java
index d0a849f..f6041e0 100644
--- a/mllib/src/test/java/org/apache/spark/ml/feature/JavaWord2VecSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/feature/JavaWord2VecSuite.java
@@ -53,7 +53,7 @@ public class JavaWord2VecSuite extends SharedSparkSession {
for (Row r : result.select("result").collectAsList()) {
double[] polyFeatures = ((Vector) r.get(0)).toArray();
- Assert.assertEquals(polyFeatures.length, 3);
+ Assert.assertEquals(3, polyFeatures.length);
}
}
}
diff --git a/mllib/src/test/java/org/apache/spark/ml/param/JavaParamsSuite.java b/mllib/src/test/java/org/apache/spark/ml/param/JavaParamsSuite.java
index 1077e10..5dae65c 100644
--- a/mllib/src/test/java/org/apache/spark/ml/param/JavaParamsSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/param/JavaParamsSuite.java
@@ -30,10 +30,10 @@ public class JavaParamsSuite {
@Test
public void testParams() {
JavaTestParams testParams = new JavaTestParams();
- Assert.assertEquals(testParams.getMyIntParam(), 1);
+ Assert.assertEquals(1, testParams.getMyIntParam());
testParams.setMyIntParam(2).setMyDoubleParam(0.4).setMyStringParam("a");
- Assert.assertEquals(testParams.getMyDoubleParam(), 0.4, 0.0);
- Assert.assertEquals(testParams.getMyStringParam(), "a");
+ Assert.assertEquals(0.4, testParams.getMyDoubleParam(), 0.0);
+ Assert.assertEquals("a", testParams.getMyStringParam());
Assert.assertArrayEquals(testParams.getMyDoubleArrayParam(), new double[]{1.0, 2.0}, 0.0);
}
diff --git a/mllib/src/test/java/org/apache/spark/mllib/clustering/JavaGaussianMixtureSuite.java b/mllib/src/test/java/org/apache/spark/mllib/clustering/JavaGaussianMixtureSuite.java
index bf76719..51313f4 100644
--- a/mllib/src/test/java/org/apache/spark/mllib/clustering/JavaGaussianMixtureSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/clustering/JavaGaussianMixtureSuite.java
@@ -42,7 +42,7 @@ public class JavaGaussianMixtureSuite extends SharedSparkSession {
JavaRDD<Vector> data = jsc.parallelize(points, 2);
GaussianMixtureModel model = new GaussianMixture().setK(2).setMaxIterations(1).setSeed(1234)
.run(data);
- assertEquals(model.gaussians().length, 2);
+ assertEquals(2, model.gaussians().length);
JavaRDD<Integer> predictions = model.predict(data);
predictions.first();
}
diff --git a/sql/catalyst/src/test/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatchSuite.java b/sql/catalyst/src/test/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatchSuite.java
index f3d82b4..b02346a 100644
--- a/sql/catalyst/src/test/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatchSuite.java
+++ b/sql/catalyst/src/test/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatchSuite.java
@@ -161,8 +161,8 @@ public class RowBasedKeyValueBatchSuite {
valueSchema, taskMemoryManager, DEFAULT_CAPACITY);
RowBasedKeyValueBatch batch2 = RowBasedKeyValueBatch.allocate(fixedKeySchema,
valueSchema, taskMemoryManager, DEFAULT_CAPACITY)) {
- Assert.assertEquals(batch1.getClass(), VariableLengthRowBasedKeyValueBatch.class);
- Assert.assertEquals(batch2.getClass(), FixedLengthRowBasedKeyValueBatch.class);
+ Assert.assertEquals(VariableLengthRowBasedKeyValueBatch.class, batch1.getClass());
+ Assert.assertEquals(FixedLengthRowBasedKeyValueBatch.class, batch2.getClass());
}
}
@@ -290,7 +290,7 @@ public class RowBasedKeyValueBatchSuite {
appendRow(batch, key, value);
}
UnsafeRow ret = appendRow(batch, key, value);
- Assert.assertEquals(batch.numRows(), 10);
+ Assert.assertEquals(10, batch.numRows());
Assert.assertNull(ret);
org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> iterator
= batch.rowIterator();
@@ -322,7 +322,7 @@ public class RowBasedKeyValueBatchSuite {
numRows++;
}
UnsafeRow ret = appendRow(batch, key, value);
- Assert.assertEquals(batch.numRows(), numRows);
+ Assert.assertEquals(numRows, batch.numRows());
Assert.assertNull(ret);
org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> iterator
= batch.rowIterator();
diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaBeanDeserializationSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaBeanDeserializationSuite.java
index 7bf0789..c8b5555 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/JavaBeanDeserializationSuite.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaBeanDeserializationSuite.java
@@ -78,7 +78,7 @@ public class JavaBeanDeserializationSuite implements Serializable {
.as(encoder);
List<ArrayRecord> records = dataset.collectAsList();
- Assert.assertEquals(records, ARRAY_RECORDS);
+ Assert.assertEquals(ARRAY_RECORDS, records);
}
private static final List<MapRecord> MAP_RECORDS = new ArrayList<>();
@@ -121,7 +121,7 @@ public class JavaBeanDeserializationSuite implements Serializable {
List<MapRecord> records = dataset.collectAsList();
- Assert.assertEquals(records, MAP_RECORDS);
+ Assert.assertEquals(MAP_RECORDS, records);
}
@Test
diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java
index a05afa4..ad122d9 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java
@@ -467,8 +467,8 @@ public class JavaDataFrameSuite {
BeanWithoutGetter bean = new BeanWithoutGetter();
List<BeanWithoutGetter> data = Arrays.asList(bean);
Dataset<Row> df = spark.createDataFrame(data, BeanWithoutGetter.class);
- Assert.assertEquals(df.schema().length(), 0);
- Assert.assertEquals(df.collectAsList().size(), 1);
+ Assert.assertEquals(0, df.schema().length());
+ Assert.assertEquals(1, df.collectAsList().size());
}
@Test
diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
index 1e5f55e..d8462ae 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
@@ -853,7 +853,7 @@ public class JavaDatasetSuite implements Serializable {
NestedSmallBean nestedSmallBean = new NestedSmallBean();
nestedSmallBean.setF(smallBean);
- Assert.assertEquals(ds.collectAsList(), Collections.singletonList(nestedSmallBean));
+ Assert.assertEquals(Collections.singletonList(nestedSmallBean), ds.collectAsList());
}
// Shouldn't throw runtime exception when parent object (`ClassData`) is null
@@ -864,7 +864,7 @@ public class JavaDatasetSuite implements Serializable {
Dataset<NestedSmallBean> ds = df.as(Encoders.bean(NestedSmallBean.class));
NestedSmallBean nestedSmallBean = new NestedSmallBean();
- Assert.assertEquals(ds.collectAsList(), Collections.singletonList(nestedSmallBean));
+ Assert.assertEquals(Collections.singletonList(nestedSmallBean), ds.collectAsList());
}
nullabilityCheck.expect(RuntimeException.class);
@@ -1384,7 +1384,7 @@ public class JavaDatasetSuite implements Serializable {
new BeanWithEnum(MyEnum.B, "flower boulevard"));
Encoder<BeanWithEnum> encoder = Encoders.bean(BeanWithEnum.class);
Dataset<BeanWithEnum> ds = spark.createDataset(data, encoder);
- Assert.assertEquals(ds.collectAsList(), data);
+ Assert.assertEquals(data, ds.collectAsList());
}
public static class EmptyBean implements Serializable {}
@@ -1394,8 +1394,8 @@ public class JavaDatasetSuite implements Serializable {
EmptyBean bean = new EmptyBean();
List<EmptyBean> data = Arrays.asList(bean);
Dataset<EmptyBean> df = spark.createDataset(data, Encoders.bean(EmptyBean.class));
- Assert.assertEquals(df.schema().length(), 0);
- Assert.assertEquals(df.collectAsList().size(), 1);
+ Assert.assertEquals(0, df.schema().length());
+ Assert.assertEquals(1, df.collectAsList().size());
}
public class CircularReference1Bean implements Serializable {
diff --git a/streaming/src/test/java/org/apache/spark/streaming/JavaWriteAheadLogSuite.java b/streaming/src/test/java/org/apache/spark/streaming/JavaWriteAheadLogSuite.java
index 3f4e6dd..7037de1 100644
--- a/streaming/src/test/java/org/apache/spark/streaming/JavaWriteAheadLogSuite.java
+++ b/streaming/src/test/java/org/apache/spark/streaming/JavaWriteAheadLogSuite.java
@@ -120,6 +120,6 @@ public class JavaWriteAheadLogSuite extends WriteAheadLog {
while (dataIterator.hasNext()) {
readData.add(JavaUtils.bytesToString(dataIterator.next()));
}
- Assert.assertEquals(readData, Arrays.asList("data3", "data4"));
+ Assert.assertEquals(Arrays.asList("data3", "data4"), readData);
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org