You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@jmeter.apache.org by fs...@apache.org on 2015/12/11 21:29:08 UTC

svn commit: r1719533 - /jmeter/trunk/src/core/org/apache/jmeter/report/processor/ExternalSampleSorter.java

Author: fschumacher
Date: Fri Dec 11 20:29:08 2015
New Revision: 1719533

URL: http://svn.apache.org/viewvc?rev=1719533&view=rev
Log:
The usage of volatile indicates that chunkedSampleCount and inputSampleCount are
potentially accessed by multiple threads. In that case the ++ operation is not
safe to use, so switch to AtomicLong instead.

Modified:
    jmeter/trunk/src/core/org/apache/jmeter/report/processor/ExternalSampleSorter.java

Modified: jmeter/trunk/src/core/org/apache/jmeter/report/processor/ExternalSampleSorter.java
URL: http://svn.apache.org/viewvc/jmeter/trunk/src/core/org/apache/jmeter/report/processor/ExternalSampleSorter.java?rev=1719533&r1=1719532&r2=1719533&view=diff
==============================================================================
--- jmeter/trunk/src/core/org/apache/jmeter/report/processor/ExternalSampleSorter.java (original)
+++ jmeter/trunk/src/core/org/apache/jmeter/report/processor/ExternalSampleSorter.java Fri Dec 11 20:29:08 2015
@@ -28,6 +28,7 @@ import java.util.concurrent.LinkedBlocki
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.jmeter.report.core.ArgumentNullException;
 import org.apache.jmeter.report.core.CsvFile;
@@ -98,9 +99,9 @@ public class ExternalSampleSorter extend
 
     private boolean parallelize;
 
-    private volatile long chunkedSampleCount;
+    private AtomicLong chunkedSampleCount = new AtomicLong();
 
-    private volatile long inputSampleCount;
+    private AtomicLong inputSampleCount = new AtomicLong();
 
     private LinkedList<File> chunks;
 
@@ -303,8 +304,8 @@ public class ExternalSampleSorter extend
         File workDir = getWorkingDirectory();
         workDir.mkdir();
         this.pool.prestartAllCoreThreads();
-        inputSampleCount = 0;
-        chunkedSampleCount = 0;
+        inputSampleCount.set(0);
+        chunkedSampleCount.set(0);
         chunks = new LinkedList<>();
         samples = new LinkedList<>();
         sampleMetadata = getConsumedMetadata(0);
@@ -314,7 +315,7 @@ public class ExternalSampleSorter extend
     @Override
     public void consume(Sample s, int channel) {
         samples.add(s);
-        inputSampleCount++;
+        inputSampleCount.incrementAndGet();
         if (samples.size() >= chunkSize) {
             chunks.add(sortAndDump(samples, sampleMetadata));
             samples.clear();
@@ -327,11 +328,11 @@ public class ExternalSampleSorter extend
             chunks.add(sortAndDump(samples, sampleMetadata));
         }
         if (log.isDebugEnabled()) {
-            log.debug("sort(): " + inputSampleCount
-                    + " samples read from input, " + chunkedSampleCount
+            log.debug("sort(): " + inputSampleCount.get()
+                    + " samples read from input, " + chunkedSampleCount.get()
                     + " samples written to chunk files");
-            if (inputSampleCount != chunkedSampleCount) {
-                log.error("Failure !");
+            if (inputSampleCount.get() != chunkedSampleCount.get()) {
+                log.error("Failure! Number of samples read from input and written to chunk files differ");
             } else {
                 log.info("chunked samples dumps succeeded.");
             }
@@ -374,7 +375,7 @@ public class ExternalSampleSorter extend
         try {
             for (Sample sample : sortedSamples) {
                 csvWriter.write(sample);
-                chunkedSampleCount++;
+                chunkedSampleCount.incrementAndGet();
             }
         } finally {
             csvWriter.close();