Posted to commits@nifi.apache.org by jo...@apache.org on 2022/10/05 21:50:17 UTC

[nifi] branch main updated: NIFI-10592 This closes #6486. fix flaky tests using wrong map impl.

This is an automated email from the ASF dual-hosted git repository.

joewitt pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
     new bcec883344 NIFI-10592 This closes #6486. fix flaky tests using wrong map impl.
bcec883344 is described below

commit bcec883344534ca466d5c74bece2c4f2e81e3bd5
Author: Yanni <yz...@illinois.edu>
AuthorDate: Tue Oct 4 00:23:00 2022 -0500

    NIFI-10592 This closes #6486. fix flaky tests using wrong map impl.
    
    Signed-off-by: Joe Witt <jo...@apache.org>
---
 .../TestAzureLogAnalyticsProvenanceReportingTask.java      |  6 +++---
 .../reporting/prometheus/TestPrometheusRecordSink.java     | 14 ++++++++------
 .../cache/server/map/DistributedMapCacheTest.java          |  4 ++--
 3 files changed, 13 insertions(+), 11 deletions(-)
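
For context on why switching the Map implementation removes the flakiness: java.util.HashMap makes no guarantee that iteration order matches insertion order, while java.util.LinkedHashMap iterates entries in the order they were put, so a test that asserts on output serialized from a map is only deterministic when the map's iteration order is. A minimal standalone sketch of the difference (the class name MapOrderDemo is illustrative only and is not part of this commit):

    import java.util.HashMap;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class MapOrderDemo {
        public static void main(String[] args) {
            // HashMap iterates by hash bucket, so insertion order is not preserved
            // and the printed order is not guaranteed by the Map contract.
            Map<String, String> plain = new HashMap<>();
            plain.put("TestKeyString1", "StringValue1");
            plain.put("TestKeyString2", "StringValue2");
            System.out.println("HashMap:       " + plain);

            // LinkedHashMap preserves insertion order, so TestKeyString1 always
            // appears before TestKeyString2, matching the updated expected JSON.
            Map<String, String> ordered = new LinkedHashMap<>();
            ordered.put("TestKeyString1", "StringValue1");
            ordered.put("TestKeyString2", "StringValue2");
            System.out.println("LinkedHashMap: " + ordered);
        }
    }

The same reasoning applies to the HashSet-to-LinkedHashSet change in DistributedMapCacheTest further down.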

diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-reporting-task/src/test/java/org/apache/nifi/reporting/azure/loganalytics/TestAzureLogAnalyticsProvenanceReportingTask.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-reporting-task/src/test/java/org/apache/nifi/reporting/azure/loganalytics/TestAzureLogAnalyticsProvenanceReportingTask.java
index 814a461284..88515332ab 100644
--- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-reporting-task/src/test/java/org/apache/nifi/reporting/azure/loganalytics/TestAzureLogAnalyticsProvenanceReportingTask.java
+++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-reporting-task/src/test/java/org/apache/nifi/reporting/azure/loganalytics/TestAzureLogAnalyticsProvenanceReportingTask.java
@@ -26,7 +26,7 @@ import javax.json.JsonObjectBuilder;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.Map;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -64,12 +64,12 @@ public class TestAzureLogAnalyticsProvenanceReportingTask {
         final Map<String, Object> config = Collections.emptyMap();
         final JsonBuilderFactory factory = Json.createBuilderFactory(config);
         final JsonObjectBuilder builder = factory.createObjectBuilder();
-        Map<String, String> values = new HashMap<String, String>();
+        Map<String, String> values = new LinkedHashMap<String, String>();
         values.put("TestKeyString1", "StringValue1");
         values.put("TestKeyString2", "StringValue2");
         AzureLogAnalyticsProvenanceReportingTask.addField(builder, factory, "TestKeyString", values, true);
         javax.json.JsonObject actualJson = builder.build();
-        String expectedjsonString = "{\"TestKeyString\":{\"TestKeyString2\":\"StringValue2\",\"TestKeyString1\":\"StringValue1\"}}";
+        String expectedjsonString = "{\"TestKeyString\":{\"TestKeyString1\":\"StringValue1\",\"TestKeyString2\":\"StringValue2\"}}";
         JsonObject expectedJson = new Gson().fromJson(expectedjsonString, JsonObject.class);
         assertEquals(expectedJson.toString(), actualJson.toString());
     }
diff --git a/nifi-nar-bundles/nifi-prometheus-bundle/nifi-prometheus-reporting-task/src/test/java/org/apache/nifi/reporting/prometheus/TestPrometheusRecordSink.java b/nifi-nar-bundles/nifi-prometheus-bundle/nifi-prometheus-reporting-task/src/test/java/org/apache/nifi/reporting/prometheus/TestPrometheusRecordSink.java
index d2db516657..a1dca49749 100644
--- a/nifi-nar-bundles/nifi-prometheus-bundle/nifi-prometheus-reporting-task/src/test/java/org/apache/nifi/reporting/prometheus/TestPrometheusRecordSink.java
+++ b/nifi-nar-bundles/nifi-prometheus-bundle/nifi-prometheus-reporting-task/src/test/java/org/apache/nifi/reporting/prometheus/TestPrometheusRecordSink.java
@@ -53,7 +53,7 @@ import java.math.BigDecimal;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.util.Arrays;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.UUID;
@@ -80,12 +80,12 @@ public class TestPrometheusRecordSink {
         );
         RecordSchema recordSchema = new SimpleRecordSchema(recordFields);
 
-        Map<String, Object> row1 = new HashMap<>();
+        Map<String, Object> row1 = new LinkedHashMap<>();
         row1.put("field1", 15);
         row1.put("field2", BigDecimal.valueOf(12.34567D));
         row1.put("field3", "Hello");
 
-        Map<String, Object> row2 = new HashMap<>();
+        Map<String, Object> row2 = new LinkedHashMap<>();
         row2.put("field1", 6);
         row2.put("field2", BigDecimal.valueOf(0.1234567890123456789D));
         row2.put("field3", "World!");
@@ -95,7 +95,7 @@ public class TestPrometheusRecordSink {
                 new MapRecord(recordSchema, row2)
         ));
 
-        Map<String, String> attributes = new HashMap<>();
+        Map<String, String> attributes = new LinkedHashMap<>();
         attributes.put("a", "Hello");
         WriteResult writeResult = sink.sendData(recordSet, attributes, true);
         assertNotNull(writeResult);
@@ -104,8 +104,10 @@ public class TestPrometheusRecordSink {
 
 
         final String content = getMetrics();
-        assertTrue(content.contains("field1{field3=\"Hello\",} 15.0\nfield1{field3=\"World!\",} 6.0\n"));
-        assertTrue(content.contains("field2{field3=\"Hello\",} 12.34567\nfield2{field3=\"World!\",} 0.12345678901234568\n"));
+        assertTrue(content.contains("field1{field3=\"Hello\",} 15.0\nfield1{field3=\"World!\",} 6.0\n")
+                || content.contains("field1{field3=\"World!\",} 6.0\nfield1{field3=\"Hello\",} 15.0\n"));
+        assertTrue(content.contains("field2{field3=\"Hello\",} 12.34567\nfield2{field3=\"World!\",} 0.12345678901234568\n")
+                || content.contains("field2{field3=\"World!\",} 0.12345678901234568\nfield2{field3=\"Hello\",} 12.34567\n"));
 
         try {
             sink.onStopped();
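
The relaxed assertions above accept either ordering of the rendered sample lines, presumably because the order in which the Prometheus client exposes the per-label samples of a metric may not be under the test's control even with LinkedHashMap rows. If more than two orderings were possible, an order-insensitive alternative would be to assert on each expected line individually; the sketch below is illustrative only (assertContainsEachLine is a hypothetical helper, not part of this commit) and gives up the adjacency check that the committed OR of both orderings still provides:

    import static org.junit.jupiter.api.Assertions.assertTrue;

    // Hypothetical helper: verify each expected sample line is present in the
    // scrape output without asserting anything about their relative order.
    private static void assertContainsEachLine(final String content, final String... expectedLines) {
        for (final String expectedLine : expectedLines) {
            assertTrue(content.contains(expectedLine), "Missing metric line: " + expectedLine);
        }
    }
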
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/test/java/org/apache/nifi/distributed/cache/server/map/DistributedMapCacheTest.java b/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/test/java/org/apache/nifi/distributed/cache/server/map/DistributedMapCacheTest.java
index d292159425..7bae0d9dcc 100644
--- a/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/test/java/org/apache/nifi/distributed/cache/server/map/DistributedMapCacheTest.java
+++ b/nifi-nar-bundles/nifi-standard-services/nifi-distributed-cache-services-bundle/nifi-distributed-cache-server/src/test/java/org/apache/nifi/distributed/cache/server/map/DistributedMapCacheTest.java
@@ -36,7 +36,7 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
-import java.util.HashSet;
+import java.util.LinkedHashSet;
 import java.util.Map;
 import java.util.Set;
 
@@ -149,7 +149,7 @@ public class DistributedMapCacheTest {
         for (int i = 0; (i < 3); ++i) {
             client.put(key + i, value + i, serializer, serializer);
         }
-        final Set<String> keys = new HashSet<>(Arrays.asList("keySubMap0", "keySubMap1", "keySubMap2"));
+        final Set<String> keys = new LinkedHashSet<>(Arrays.asList("keySubMap0", "keySubMap1", "keySubMap2"));
         final Map<String, String> subMap = client.subMap(keys, serializer, deserializer);
         assertEquals(3, subMap.size());
         for (int i = 0; (i < 3); ++i) {