Posted to common-commits@hadoop.apache.org by aa...@apache.org on 2019/08/21 01:11:46 UTC

[hadoop] branch trunk updated: HADOOP-16496. Apply HDDS-1870 (ConcurrentModification at PrometheusMetricsSink) to Hadoop common.

This is an automated email from the ASF dual-hosted git repository.

aajisaka pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 30ce854  HADOOP-16496. Apply HDDS-1870 (ConcurrentModification at PrometheusMetricsSink) to Hadoop common.
30ce854 is described below

commit 30ce8546f13209e7272617178f3f2f8753a6c3f2
Author: Akira Ajisaka <aa...@apache.org>
AuthorDate: Tue Aug 20 16:13:34 2019 +0900

    HADOOP-16496. Apply HDDS-1870 (ConcurrentModification at PrometheusMetricsSink) to Hadoop common.
    
    This closes #1317
    
    Reviewed-by: Bharat Viswanadham <bh...@apache.org>
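
For context: the ConcurrentModificationException this change fixes can occur because
putMetrics() and writeMetrics() run on different threads, so the metrics system thread
refreshes the cached lines while an exporter thread iterates them. Below is a minimal
sketch of that race; the class name SinkRaceSketch, thread wiring, and loop counts are
illustrative and not part of the Hadoop code. With HashMap the iteration may fail
(behaviour under concurrent mutation is timing-dependent), while ConcurrentHashMap's
weakly consistent iterators never throw ConcurrentModificationException:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class SinkRaceSketch {
      public static void main(String[] args) throws InterruptedException {
        demo(new HashMap<>());            // may fail; timing-dependent
        demo(new ConcurrentHashMap<>());  // never throws ConcurrentModificationException
      }

      static void demo(Map<String, String> metricLines) throws InterruptedException {
        // Stand-in for the metrics system thread calling putMetrics().
        Thread producer = new Thread(() -> {
          for (int i = 0; i < 100_000; i++) {
            metricLines.put("metric_" + i, "metric_" + i + "{} " + i + "\n");
          }
        });
        producer.start();
        try {
          // Stand-in for writeMetrics(): iterate the cached lines while puts are in flight.
          for (int round = 0; round < 1_000; round++) {
            for (String line : metricLines.values()) {
              // consume the line, as writer.write(line) would
            }
          }
          System.out.println(metricLines.getClass().getSimpleName() + ": no exception");
        } catch (RuntimeException e) {
          // Typically java.util.ConcurrentModificationException for the HashMap run.
          System.out.println(metricLines.getClass().getSimpleName() + ": " + e);
        } finally {
          producer.join();
        }
      }
    }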
---
 .../metrics2/sink/PrometheusMetricsSink.java       | 24 ++++++++++++++--------
 1 file changed, 16 insertions(+), 8 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/PrometheusMetricsSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/PrometheusMetricsSink.java
index b1e8da8..10df769 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/PrometheusMetricsSink.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/PrometheusMetricsSink.java
@@ -26,8 +26,8 @@ import org.apache.hadoop.metrics2.MetricsTag;
 
 import java.io.IOException;
 import java.io.Writer;
-import java.util.HashMap;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.regex.Pattern;
 
 import org.apache.commons.lang3.StringUtils;
@@ -42,7 +42,7 @@ public class PrometheusMetricsSink implements MetricsSink {
   /**
    * Cached output lines for each metrics.
    */
-  private Map<String, String> metricLines = new HashMap<>();
+  private final Map<String, String> metricLines = new ConcurrentHashMap<>();
 
   private static final Pattern SPLIT_PATTERN =
       Pattern.compile("(?<!(^|[A-Z_]))(?=[A-Z])|(?<!^)(?=[A-Z][a-z])");
@@ -61,9 +61,13 @@ public class PrometheusMetricsSink implements MetricsSink {
             metricsRecord.name(), metrics.name());
 
         StringBuilder builder = new StringBuilder();
-        builder.append("# TYPE " + key + " " +
-            metrics.type().toString().toLowerCase() + "\n");
-        builder.append(key + "{");
+        builder.append("# TYPE ")
+            .append(key)
+            .append(" ")
+            .append(metrics.type().toString().toLowerCase())
+            .append("\n")
+            .append(key)
+            .append("{");
         String sep = "";
 
         //add tags
@@ -72,13 +76,17 @@ public class PrometheusMetricsSink implements MetricsSink {
 
           //ignore specific tag which includes sub-hierarchy
           if (!tagName.equals("numopenconnectionsperuser")) {
-            builder.append(
-                sep + tagName + "=\"" + tag.value() + "\"");
+            builder.append(sep)
+                .append(tagName)
+                .append("=\"")
+                .append(tag.value())
+                .append("\"");
             sep = ",";
           }
         }
         builder.append("} ");
         builder.append(metrics.value());
+        builder.append("\n");
         metricLines.put(key, builder.toString());
 
       }
@@ -110,7 +118,7 @@ public class PrometheusMetricsSink implements MetricsSink {
 
   public void writeMetrics(Writer writer) throws IOException {
     for (String line : metricLines.values()) {
-      writer.write(line + "\n");
+      writer.write(line);
     }
   }
 }
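
The other hunks are behaviour-preserving: chained StringBuilder appends replace
intermediate String concatenation, and the trailing "\n" moves into the cached line so
that writeMetrics() emits each value unchanged. As a hedged usage sketch of the updated
writeMetrics(Writer), the endpoint class below is illustrative; only PrometheusMetricsSink
and the writeMetrics signature come from the diff above:

    import java.io.IOException;
    import java.io.StringWriter;

    import org.apache.hadoop.metrics2.sink.PrometheusMetricsSink;

    public class PrometheusScrapeSketch {
      private final PrometheusMetricsSink sink = new PrometheusMetricsSink();

      /** Renders the currently cached metric lines in Prometheus text format. */
      public String scrape() throws IOException {
        StringWriter out = new StringWriter();
        // Safe to call while the metrics system thread is still invoking putMetrics():
        // ConcurrentHashMap iterators are weakly consistent, so the scrape sees a
        // best-effort snapshot instead of failing.
        sink.writeMetrics(out);
        return out.toString();
      }
    }

In a real deployment the sink is registered with the metrics system and the scrape
method backs an HTTP handler; that wiring is outside this patch.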


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org