Posted to commits@lucene.apache.org by ko...@apache.org on 2018/03/05 06:36:55 UTC

[4/8] lucene-solr:master: SOLR-11795: Add Solr metrics exporter for Prometheus

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6d66fc04/solr/contrib/prometheus-exporter/ivy.xml
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/ivy.xml b/solr/contrib/prometheus-exporter/ivy.xml
new file mode 100644
index 0000000..a8f8894
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/ivy.xml
@@ -0,0 +1,40 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one
+   or more contributor license agreements.  See the NOTICE file
+   distributed with this work for additional information
+   regarding copyright ownership.  The ASF licenses this file
+   to you under the Apache License, Version 2.0 (the
+   "License"); you may not use this file except in compliance
+   with the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing,
+   software distributed under the License is distributed on an
+   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+   KIND, either express or implied.  See the License for the
+   specific language governing permissions and limitations
+   under the License.    
+-->
+<ivy-module version="2.0">
+  <info organisation="org.apache.solr" module="prometheus"/>
+  <configurations defaultconfmapping="compile->master;test->master">
+    <conf name="compile" transitive="false"/>
+    <conf name="test" transitive="false"/>
+  </configurations>
+  <dependencies>
+    <dependency org="io.prometheus" name="simpleclient" rev="${/io.prometheus/simpleclient}" conf="compile"/>
+    <dependency org="io.prometheus" name="simpleclient_common" rev="${/io.prometheus/simpleclient_common}" conf="compile"/>
+    <dependency org="io.prometheus" name="simpleclient_httpserver" rev="${/io.prometheus/simpleclient_httpserver}" conf="compile"/>
+    <dependency org="com.fasterxml.jackson.core" name="jackson-core" rev="${/com.fasterxml.jackson.core/jackson-core}" conf="compile"/>
+    <dependency org="com.fasterxml.jackson.core" name="jackson-databind" rev="${/com.fasterxml.jackson.core/jackson-databind}" conf="compile"/>
+    <dependency org="com.fasterxml.jackson.core" name="jackson-annotations" rev="${/com.fasterxml.jackson.core/jackson-annotations}" conf="compile"/>
+    <dependency org="net.thisptr" name="jackson-jq" rev="${/net.thisptr/jackson-jq}" conf="compile"/>
+    <dependency org="net.sourceforge.argparse4j" name="argparse4j" rev="${/net.sourceforge.argparse4j/argparse4j}" conf="compile"/>
+    <dependency org="org.slf4j" name="slf4j-api" rev="${/org.slf4j/slf4j-api}" conf="compile"/>
+    <dependency org="org.slf4j" name="slf4j-log4j12" rev="${/org.slf4j/slf4j-log4j12}" conf="compile"/>
+    <dependency org="log4j" name="log4j" rev="${/log4j/log4j}" conf="compile"/>
+
+    <exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/>
+  </dependencies>
+</ivy-module>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6d66fc04/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/SolrCollector.java
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/SolrCollector.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/SolrCollector.java
new file mode 100644
index 0000000..66bae42
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/SolrCollector.java
@@ -0,0 +1,463 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.prometheus.collector;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.solr.core.Config;
+import org.apache.solr.prometheus.scraper.SolrScraper;
+import io.prometheus.client.Collector;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.impl.NoOpResponseParser;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.CoreAdminRequest;
+import org.apache.solr.common.params.CoreAdminParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.ExecutorUtil;
+import org.apache.solr.util.DOMUtil;
+import org.apache.solr.util.DefaultSolrThreadFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.w3c.dom.Node;
+
+import java.lang.invoke.MethodHandles;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * SolrCollector collects Solr metrics and exposes them as Prometheus metric families.
+ */
+public class SolrCollector extends Collector implements Collector.Describable {
+  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private SolrClient solrClient;
+  private Config config;
+  private int numThreads;
+  private ExecutorService executorService;
+  private static ObjectMapper om = new ObjectMapper();
+
+  /**
+   * Constructor.
+   */
+  public SolrCollector(SolrClient solrClient, Config config, int numThreads) {
+    this.solrClient = solrClient;
+    this.config = config;
+    this.numThreads = numThreads;
+  }
+
+  /**
+   * Describe the scrape duration metric.
+   */
+  public List<Collector.MetricFamilySamples> describe() {
+    List<MetricFamilySamples> metricFamilies = new ArrayList<>();
+    metricFamilies.add(new MetricFamilySamples("solr_exporter_duration_seconds", Type.GAUGE, "Time this Solr scrape took, in seconds.", new ArrayList<>()));
+    return metricFamilies;
+  }
+
+  /**
+   * Collect samples.
+   */
+  public List<MetricFamilySamples> collect() {
+    // start time of scraping.
+    long startTime = System.nanoTime();
+
+    this.executorService = ExecutorUtil.newMDCAwareFixedThreadPool(numThreads, new DefaultSolrThreadFactory("solr-exporter"));
+
+    Map<String, MetricFamilySamples> metricFamilySamplesMap = new LinkedHashMap<>();
+
+    List<Future<Map<String, MetricFamilySamples>>> futureList = new ArrayList<>();
+
+    try {
+      // Ping
+      Node pingNode = this.config.getNode("/config/rules/ping", true);
+      if (pingNode != null) {
+        NamedList pingNL = DOMUtil.childNodesToNamedList(pingNode);
+        List<NamedList> requestsNL = pingNL.getAll("request");
+
+        if (this.solrClient instanceof CloudSolrClient) {
+          // in SolrCloud mode
+          List<HttpSolrClient> httpSolrClients = new ArrayList<>();
+          try {
+            httpSolrClients = getHttpSolrClients((CloudSolrClient) this.solrClient);
+            for (HttpSolrClient httpSolrClient : httpSolrClients) {
+              for (NamedList requestNL : requestsNL) {
+                String coreName = (String) ((NamedList) requestNL.get("query")).get("core");
+                String collectionName = (String) ((NamedList) requestNL.get("query")).get("collection");
+                if (coreName == null && collectionName == null) {
+                  try {
+                    List<String> cores = getCores(httpSolrClient);
+                    for (String core : cores) {
+                      LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
+                      LinkedHashMap query = (LinkedHashMap) conf.get("query");
+                      if (query != null) {
+                        query.put("core", core);
+                      }
+
+                      SolrScraper scraper = new SolrScraper(httpSolrClient, conf);
+                      Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
+                      futureList.add(future);
+                    }
+                  } catch (SolrServerException | IOException e) {
+                    this.logger.error("failed to get cores: " + e.getMessage());
+                  }
+                } else if (coreName != null && collectionName == null) {
+                  LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
+                  SolrScraper scraper = new SolrScraper(httpSolrClient, conf);
+                  Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
+                  futureList.add(future);
+                }
+              }
+            }
+
+            // wait for HttpSolrClient scrapes to finish
+            for (Future<Map<String, MetricFamilySamples>> future : futureList) {
+              try {
+                Map<String, MetricFamilySamples> m = future.get(60, TimeUnit.SECONDS);
+                mergeMetrics(metricFamilySamplesMap, m);
+              } catch (InterruptedException | ExecutionException | TimeoutException e) {
+                this.logger.error(e.getMessage());
+              }
+            }
+          } catch (SolrServerException | IOException e) {
+            this.logger.error("failed to get HttpSolrClients: " + e.getMessage());
+          } finally {
+            for (HttpSolrClient httpSolrClient : httpSolrClients) {
+              try {
+                httpSolrClient.close();
+              } catch (IOException e) {
+                this.logger.error("failed to close HttpSolrClient: " + e.getMessage());
+              }
+            }
+          }
+
+          // collection
+          for (NamedList requestNL : requestsNL) {
+            String coreName = (String) ((NamedList) requestNL.get("query")).get("core");
+            String collectionName = (String) ((NamedList) requestNL.get("query")).get("collection");
+            if (coreName == null && collectionName == null) {
+              try {
+                List<String> collections = getCollections((CloudSolrClient) this.solrClient);
+                for (String collection : collections) {
+                  LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
+                  LinkedHashMap query = (LinkedHashMap) conf.get("query");
+                  if (query != null) {
+                    query.put("collection", collection);
+                  }
+
+                  SolrScraper scraper = new SolrScraper(this.solrClient, conf);
+                  Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
+                  futureList.add(future);
+                }
+              } catch (SolrServerException | IOException e) {
+                this.logger.error("failed to get cores: " + e.getMessage());
+              }
+            } else if (coreName == null && collectionName != null) {
+              LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
+
+              SolrScraper scraper = new SolrScraper(this.solrClient, conf);
+              Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
+              futureList.add(future);
+            }
+          }
+        } else {
+          // in Standalone mode
+          for (NamedList requestNL : requestsNL) {
+            String coreName = (String) ((NamedList) requestNL.get("query")).get("core");
+            if (coreName == null) {
+              try {
+                List<String> cores = getCores((HttpSolrClient) this.solrClient);
+                for (String core : cores) {
+                  LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
+                  LinkedHashMap query = (LinkedHashMap) conf.get("query");
+                  if (query != null) {
+                    query.put("core", core);
+                  }
+
+                  SolrScraper scraper = new SolrScraper(this.solrClient, conf);
+                  Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
+                  futureList.add(future);
+                }
+              } catch (SolrServerException | IOException e) {
+                this.logger.error("failed to get cores: " + e.getMessage());
+              }
+            } else {
+              LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
+
+              SolrScraper scraper = new SolrScraper(this.solrClient, conf);
+              Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
+              futureList.add(future);
+            }
+          }
+        }
+      }
+
+      // Metrics
+      Node metricsNode = this.config.getNode("/config/rules/metrics", false);
+      if (metricsNode != null) {
+        NamedList metricsNL = DOMUtil.childNodesToNamedList(metricsNode);
+        List<NamedList> requestsNL = metricsNL.getAll("request");
+
+        if (this.solrClient instanceof CloudSolrClient) {
+          // in SolrCloud mode
+          List<HttpSolrClient> httpSolrClients = new ArrayList<>();
+          try {
+            httpSolrClients = getHttpSolrClients((CloudSolrClient) this.solrClient);
+            for (HttpSolrClient httpSolrClient : httpSolrClients) {
+              for (NamedList requestNL : requestsNL) {
+                LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
+
+                SolrScraper scraper = new SolrScraper(httpSolrClient, conf);
+                Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
+                futureList.add(future);
+              }
+            }
+
+            // wait for HttpSolrClient scrapes to finish
+            for (Future<Map<String, MetricFamilySamples>> future : futureList) {
+              try {
+                Map<String, MetricFamilySamples> m = future.get(60, TimeUnit.SECONDS);
+                mergeMetrics(metricFamilySamplesMap, m);
+              } catch (InterruptedException | ExecutionException | TimeoutException e) {
+                this.logger.error(e.getMessage());
+              }
+            }
+          } catch (SolrServerException | IOException e) {
+            this.logger.error(e.getMessage());
+          } finally {
+            for (HttpSolrClient httpSolrClient : httpSolrClients) {
+              try {
+                httpSolrClient.close();
+              } catch (IOException e) {
+                this.logger.error(e.getMessage());
+              }
+            }
+          }
+        } else {
+          // in Standalone mode
+          for (NamedList requestNL : requestsNL) {
+            LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
+
+            SolrScraper scraper = new SolrScraper(this.solrClient, conf);
+            Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
+            futureList.add(future);
+          }
+        }
+      }
+
+      // Collections
+      Node collectionsNode = this.config.getNode("/config/rules/collections", false);
+      if (collectionsNode != null && this.solrClient instanceof CloudSolrClient) {
+        NamedList collectionsNL = DOMUtil.childNodesToNamedList(collectionsNode);
+        List<NamedList> requestsNL = collectionsNL.getAll("request");
+
+        for (NamedList requestNL : requestsNL) {
+          LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
+
+          SolrScraper scraper = new SolrScraper(this.solrClient, conf);
+          Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
+          futureList.add(future);
+        }
+      }
+
+      // Search
+      Node searchNode = this.config.getNode("/config/rules/search", false);
+      if (searchNode != null) {
+        NamedList searchNL = DOMUtil.childNodesToNamedList(searchNode);
+        List<NamedList> requestsNL = searchNL.getAll("request");
+
+        for (NamedList requestNL : requestsNL) {
+          LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
+
+          SolrScraper scraper = new SolrScraper(this.solrClient, conf);
+          Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
+          futureList.add(future);
+        }
+      }
+
+      // wait for the remaining futures and merge their metrics
+      for (Future<Map<String, MetricFamilySamples>> future : futureList) {
+        try {
+          Map<String, MetricFamilySamples> m = future.get(60, TimeUnit.SECONDS);
+          mergeMetrics(metricFamilySamplesMap, m);
+        } catch (InterruptedException | ExecutionException | TimeoutException e) {
+          this.logger.error(e.getMessage());
+        }
+      }
+    } catch (Exception e) {
+      this.logger.error(e.getMessage());
+      e.printStackTrace();
+    }
+
+    // return value
+    List<MetricFamilySamples> metricFamiliesSamplesList = new ArrayList<>();
+
+    // add solr metrics
+    for (String gaugeMetricName : metricFamilySamplesMap.keySet()) {
+      MetricFamilySamples metricFamilySamples = metricFamilySamplesMap.get(gaugeMetricName);
+      if (metricFamilySamples.samples.size() > 0) {
+        metricFamiliesSamplesList.add(metricFamilySamples);
+      }
+    }
+
+    // add scrape duration metric
+    List<MetricFamilySamples.Sample> durationSample = new ArrayList<>();
+    durationSample.add(new MetricFamilySamples.Sample("solr_exporter_duration_seconds", new ArrayList<>(), new ArrayList<>(), (System.nanoTime() - startTime) / 1.0E9));
+    metricFamiliesSamplesList.add(new MetricFamilySamples("solr_exporter_duration_seconds", Type.GAUGE, "Time this Solr scrape took, in seconds.", durationSample));
+
+    this.executorService.shutdown();
+
+    return metricFamiliesSamplesList;
+  }
+
+  /**
+   * Merge metrics.
+   */
+  private Map<String, MetricFamilySamples> mergeMetrics(Map<String, MetricFamilySamples> metrics1, Map<String, MetricFamilySamples> metrics2) {
+    // merge MetricFamilySamples
+    for (String k : metrics2.keySet()) {
+      if (metrics1.containsKey(k)) {
+        for (MetricFamilySamples.Sample sample : metrics2.get(k).samples) {
+          if (!metrics1.get(k).samples.contains(sample)) {
+            metrics1.get(k).samples.add(sample);
+          }
+        }
+      } else {
+        metrics1.put(k, metrics2.get(k));
+      }
+    }
+
+    return metrics1;
+  }
+
+
+  /**
+   * Get target cores via the CoreAdmin API.
+   */
+  public static List<String> getCores(HttpSolrClient httpSolrClient) throws SolrServerException, IOException {
+    List<String> cores = new ArrayList<>();
+
+    NoOpResponseParser responseParser = new NoOpResponseParser();
+    responseParser.setWriterType("json");
+
+    httpSolrClient.setParser(responseParser);
+
+    CoreAdminRequest coreAdminRequest = new CoreAdminRequest();
+    coreAdminRequest.setAction(CoreAdminParams.CoreAdminAction.STATUS);
+    coreAdminRequest.setIndexInfoNeeded(false);
+
+    NamedList<Object> coreAdminResponse = httpSolrClient.request(coreAdminRequest);
+
+    JsonNode statusJsonNode = om.readTree((String) coreAdminResponse.get("response")).get("status");
+
+    for (Iterator<JsonNode> i = statusJsonNode.iterator(); i.hasNext(); ) {
+      String core = i.next().get("name").textValue();
+      if (!cores.contains(core)) {
+        cores.add(core);
+      }
+    }
+
+    return cores;
+  }
+
+  /**
+   * Get target collections via the Collections API.
+   */
+  public static List<String> getCollections(CloudSolrClient cloudSolrClient) throws SolrServerException, IOException {
+    List<String> collections = new ArrayList<>();
+
+    NoOpResponseParser responseParser = new NoOpResponseParser();
+    responseParser.setWriterType("json");
+
+    cloudSolrClient.setParser(responseParser);
+
+    CollectionAdminRequest collectionAdminRequest = new CollectionAdminRequest.List();
+
+    NamedList<Object> collectionAdminResponse = cloudSolrClient.request(collectionAdminRequest);
+
+    JsonNode collectionsJsonNode = om.readTree((String) collectionAdminResponse.get("response")).get("collections");
+
+    for (Iterator<JsonNode> i = collectionsJsonNode.iterator(); i.hasNext(); ) {
+      String collection = i.next().textValue();
+      if (!collections.contains(collection)) {
+        collections.add(collection);
+      }
+    }
+
+    return collections;
+  }
+
+  /**
+   * Get base URLs via the Collections API.
+   */
+  private List<String> getBaseUrls(CloudSolrClient cloudSolrClient) throws SolrServerException, IOException {
+    List<String> baseUrls = new ArrayList<>();
+
+    NoOpResponseParser responseParser = new NoOpResponseParser();
+    responseParser.setWriterType("json");
+
+    cloudSolrClient.setParser(responseParser);
+
+    CollectionAdminRequest collectionAdminRequest = new CollectionAdminRequest.ClusterStatus();
+
+    NamedList<Object> collectionAdminResponse = cloudSolrClient.request(collectionAdminRequest);
+
+    List<JsonNode> baseUrlJsonNode = om.readTree((String) collectionAdminResponse.get("response")).findValues("base_url");
+
+    for (Iterator<JsonNode> i = baseUrlJsonNode.iterator(); i.hasNext(); ) {
+      String baseUrl = i.next().textValue();
+      if (!baseUrls.contains(baseUrl)) {
+        baseUrls.add(baseUrl);
+      }
+    }
+
+    return baseUrls;
+  }
+
+  /**
+   * Get an HttpSolrClient for each node base URL in the cluster.
+   */
+  private List<HttpSolrClient> getHttpSolrClients(CloudSolrClient cloudSolrClient) throws SolrServerException, IOException {
+    List<HttpSolrClient> solrClients = new ArrayList<>();
+
+    for (String baseUrl : getBaseUrls(cloudSolrClient)) {
+      NoOpResponseParser responseParser = new NoOpResponseParser();
+      responseParser.setWriterType("json");
+
+      HttpSolrClient.Builder builder = new HttpSolrClient.Builder();
+      builder.withBaseSolrUrl(baseUrl);
+
+      HttpSolrClient httpSolrClient = builder.build();
+      httpSolrClient.setParser(responseParser);
+
+      solrClients.add(httpSolrClient);
+    }
+
+    return solrClients;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6d66fc04/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/package-info.java
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/package-info.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/package-info.java
new file mode 100644
index 0000000..63b17fe
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** 
+ * Solr metrics collector.
+ */
+package org.apache.solr.prometheus.collector;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6d66fc04/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java
new file mode 100644
index 0000000..752f176
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java
@@ -0,0 +1,254 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.prometheus.exporter;
+
+import javax.xml.parsers.ParserConfigurationException;
+
+import io.prometheus.client.CollectorRegistry;
+import io.prometheus.client.Counter;
+import io.prometheus.client.exporter.HTTPServer;
+import net.sourceforge.argparse4j.ArgumentParsers;
+import net.sourceforge.argparse4j.inf.ArgumentParser;
+import net.sourceforge.argparse4j.inf.ArgumentParserException;
+import net.sourceforge.argparse4j.inf.Namespace;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.impl.NoOpResponseParser;
+import org.apache.solr.core.Config;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.prometheus.collector.SolrCollector;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.xml.sax.SAXException;
+
+import java.lang.invoke.MethodHandles;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * SolrExporter runs an HTTP server that exposes Solr metrics in Prometheus format.
+ */
+public class SolrExporter {
+  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private static final String[] ARG_PORT_FLAGS = { "-p", "--port" };
+  private static final String ARG_PORT_METAVAR = "PORT";
+  private static final String ARG_PORT_DEST = "port";
+  private static final Integer ARG_PORT_DEFAULT = 9983;
+  private static final String ARG_PORT_HELP = "solr-exporter listen port";
+
+  private static final String[] ARG_BASE_URL_FLAGS = { "-b", "--baseurl" };
+  private static final String ARG_BASE_URL_METAVAR = "BASE_URL";
+  private static final String ARG_BASE_URL_DEST = "baseUrl";
+  private static final String ARG_BASE_URL_DEFAULT = "";
+  private static final String ARG_BASE_URL_HELP = "specify Solr base URL when connecting to Solr in standalone mode (for example 'http://localhost:8983/solr')";
+
+  private static final String[] ARG_ZK_HOST_FLAGS = { "-z", "--zkhost" };
+  private static final String ARG_ZK_HOST_METAVAR = "ZK_HOST";
+  private static final String ARG_ZK_HOST_DEST = "zkHost";
+  private static final String ARG_ZK_HOST_DEFAULT = "";
+  private static final String ARG_ZK_HOST_HELP = "specify ZooKeeper connection string when connecting to Solr in SolrCloud mode (for example 'localhost:2181/solr')";
+
+  private static final String[] ARG_CONFIG_FLAGS = { "-f", "--config-file" };
+  private static final String ARG_CONFIG_METAVAR = "CONFIG";
+  private static final String ARG_CONFIG_DEST = "configFile";
+  private static final String ARG_CONFIG_DEFAULT = "./conf/solr-exporter-config.xml";
+  private static final String ARG_CONFIG_HELP = "specify configuration file";
+
+  private static final String[] ARG_NUM_THREADS_FLAGS = { "-n", "--num-thread" };
+  private static final String ARG_NUM_THREADS_METAVAR = "NUM_THREADS";
+  private static final String ARG_NUM_THREADS_DEST = "numThreads";
+  private static final Integer ARG_NUM_THREADS_DEFAULT = 1;
+  private static final String ARG_NUM_THREADS_HELP = "specify number of threads";
+
+  private int port;
+  private SolrClient solrClient;
+  private Config config;
+  private int numThreads;
+
+  CollectorRegistry registry = new CollectorRegistry();
+
+  private HTTPServer httpServer;
+  private SolrCollector collector;
+
+  private SolrResourceLoader loader;
+
+  public static final Counter scrapeErrorTotal = Counter.build()
+      .name("solr_exporter_scrape_error_total")
+      .help("Number of scrape error.").register();
+
+  /**
+   * Constructor.
+   */
+  public SolrExporter(int port, String connStr, Path configPath, int numThreads) throws ParserConfigurationException, SAXException, IOException {
+    this(port, createClient(connStr), configPath, numThreads);
+  }
+
+  /**
+   * Constructor.
+   */
+  public SolrExporter(int port, SolrClient solrClient, Path configPath, int numThreads) throws ParserConfigurationException, SAXException, IOException {
+    super();
+
+    this.loader = new SolrResourceLoader(configPath.getParent());
+
+    this.port = port;
+    this.solrClient = solrClient;
+    this.config = new Config(this.loader, configPath.getFileName().toString());
+    this.numThreads = numThreads;
+  }
+
+  /**
+   * Start HTTP server for exporting Solr metrics.
+   */
+  public void start() throws IOException {
+    this.collector = new SolrCollector(solrClient, config, numThreads);
+    this.registry.register(this.collector);
+    this.registry.register(scrapeErrorTotal);
+    this.httpServer = new HTTPServer(new InetSocketAddress(port), this.registry);
+  }
+
+  /**
+   * Stop HTTP server for exporting Solr metrics.
+   */
+  public void stop() {
+    this.httpServer.stop();
+    this.registry.unregister(this.collector);
+//    this.collector.shutdown();
+  }
+
+  /**
+   * Create a Solr client from either a base URL or a ZooKeeper connection string.
+   */
+  private static SolrClient createClient(String connStr) {
+    SolrClient solrClient;
+
+    Pattern baseUrlPattern = Pattern.compile("^https?:\\/\\/[\\w\\/:%#\\$&\\?\\(\\)~\\.=\\+\\-]+$");
+    Pattern zkHostPattern = Pattern.compile("^(?<host>[^\\/]+)(?<chroot>|(?:\\/.*))$");
+    Matcher matcher;
+
+    matcher = baseUrlPattern.matcher(connStr);
+    if (matcher.matches()) {
+      NoOpResponseParser responseParser = new NoOpResponseParser();
+      responseParser.setWriterType("json");
+
+      HttpSolrClient.Builder builder = new HttpSolrClient.Builder();
+      builder.withBaseSolrUrl(connStr);
+
+      HttpSolrClient httpSolrClient = builder.build();
+      httpSolrClient.setParser(responseParser);
+
+      solrClient = httpSolrClient;
+    } else {
+      String host = "";
+      String chroot = "";
+
+      matcher = zkHostPattern.matcher(connStr);
+      if (matcher.matches()) {
+        host = matcher.group("host") != null ? matcher.group("host") : "";
+        chroot = matcher.group("chroot") != null ? matcher.group("chroot") : "";
+      }
+
+      NoOpResponseParser responseParser = new NoOpResponseParser();
+      responseParser.setWriterType("json");
+
+      CloudSolrClient.Builder builder = new CloudSolrClient.Builder();
+      if (host.contains(",")) {
+        List<String> hosts = new ArrayList<>();
+        for (String h : host.split(",")) {
+          if (h != null && !h.equals("")) {
+            hosts.add(h.trim());
+          }
+        }
+        builder.withZkHost(hosts);
+      } else {
+        builder.withZkHost(host);
+      }
+      if (chroot.equals("")) {
+        builder.withZkChroot("/");
+      } else {
+        builder.withZkChroot(chroot);
+      }
+
+      CloudSolrClient cloudSolrClient = builder.build();
+      cloudSolrClient.setParser(responseParser);
+
+      solrClient = cloudSolrClient;
+    }
+
+    return solrClient;
+  }
+
+  /**
+   * Entry point of SolrExporter.
+   */
+  public static void main( String[] args ) {
+    ArgumentParser parser = ArgumentParsers.newArgumentParser(SolrExporter.class.getSimpleName())
+        .description("Prometheus exporter for Apache Solr.");
+
+    parser.addArgument(ARG_PORT_FLAGS)
+        .metavar(ARG_PORT_METAVAR).dest(ARG_PORT_DEST).type(Integer.class)
+        .setDefault(ARG_PORT_DEFAULT).help(ARG_PORT_HELP);
+
+    parser.addArgument(ARG_BASE_URL_FLAGS)
+        .metavar(ARG_BASE_URL_METAVAR).dest(ARG_BASE_URL_DEST).type(String.class)
+        .setDefault(ARG_BASE_URL_DEFAULT).help(ARG_BASE_URL_HELP);
+
+    parser.addArgument(ARG_ZK_HOST_FLAGS)
+        .metavar(ARG_ZK_HOST_METAVAR).dest(ARG_ZK_HOST_DEST).type(String.class)
+        .setDefault(ARG_ZK_HOST_DEFAULT).help(ARG_ZK_HOST_HELP);
+
+    parser.addArgument(ARG_CONFIG_FLAGS)
+        .metavar(ARG_CONFIG_METAVAR).dest(ARG_CONFIG_DEST).type(String.class)
+        .setDefault(ARG_CONFIG_DEFAULT).help(ARG_CONFIG_HELP);
+
+    parser.addArgument(ARG_NUM_THREADS_FLAGS)
+        .metavar(ARG_NUM_THREADS_METAVAR).dest(ARG_NUM_THREADS_DEST).type(Integer.class)
+        .setDefault(ARG_NUM_THREADS_DEFAULT).help(ARG_NUM_THREADS_HELP);
+
+    try {
+      Namespace res = parser.parseArgs(args);
+
+      int port = res.getInt(ARG_PORT_DEST);
+
+      String connStr = "http://localhost:8983/solr";
+      if (!res.getString(ARG_BASE_URL_DEST).equals("")) {
+        connStr = res.getString(ARG_BASE_URL_DEST);
+      } else if (!res.getString(ARG_ZK_HOST_DEST).equals("")) {
+        connStr = res.getString(ARG_ZK_HOST_DEST);
+      }
+
+      Path configPath = Paths.get(res.getString(ARG_CONFIG_DEST));
+      int numThreads = res.getInt(ARG_NUM_THREADS_DEST);
+
+      SolrExporter solrExporter = new SolrExporter(port, connStr, configPath, numThreads);
+      solrExporter.start();
+      logger.info("Start server");
+    } catch (ParserConfigurationException | SAXException | IOException e) {
+      logger.error("Start server failed: " + e.toString());
+    } catch (ArgumentParserException e) {
+      parser.handleError(e);
+    }
+  }
+}
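
For reference, a minimal sketch (not part of this commit; the class name and values are illustrative) of embedding the exporter in another JVM, using only the public SolrExporter constructor and the start()/stop() methods shown above. The values mirror the defaults that main() falls back to when no arguments are given.

import java.nio.file.Paths;

import org.apache.solr.prometheus.exporter.SolrExporter;

public class SolrExporterEmbedSketch {
  public static void main(String[] args) throws Exception {
    SolrExporter exporter = new SolrExporter(
        9983,                                          // listen port (-p/--port default)
        "http://localhost:8983/solr",                  // base URL (standalone) or ZooKeeper connect string (SolrCloud)
        Paths.get("./conf/solr-exporter-config.xml"),  // -f/--config-file default
        1);                                            // -n/--num-thread default
    exporter.start();  // registers a SolrCollector and serves Prometheus metrics over HTTP
    // ... Prometheus scrapes http://localhost:9983/metrics while the server runs ...
    exporter.stop();   // stops the HTTP server and unregisters the collector
  }
}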

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6d66fc04/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/package-info.java
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/package-info.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/package-info.java
new file mode 100644
index 0000000..81f33f9
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** 
+ * Solr metrics exporter.
+ */
+package org.apache.solr.prometheus.exporter;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6d66fc04/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/SolrScraper.java
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/SolrScraper.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/SolrScraper.java
new file mode 100644
index 0000000..dd74384
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/SolrScraper.java
@@ -0,0 +1,214 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.prometheus.scraper;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.solr.prometheus.exporter.SolrExporter;
+import io.prometheus.client.Collector;
+import net.thisptr.jackson.jq.JsonQuery;
+import net.thisptr.jackson.jq.exception.JsonQueryException;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.QueryRequest;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.lang.invoke.MethodHandles;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+
+/**
+ * SolrScraper scrapes metrics from a single Solr endpoint using the configured request and jq queries.
+ */
+public class SolrScraper implements Callable<Map<String, Collector.MetricFamilySamples>> {
+  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private SolrClient solrClient;
+  private LinkedHashMap conf;
+
+  private List<String> labelNames;
+  private List<String> labelValues;
+
+  /**
+   * Constructor.
+   */
+  public SolrScraper(SolrClient solrClient, LinkedHashMap conf) {
+    super();
+
+    this.solrClient = solrClient;
+    this.conf = conf;
+
+    this.labelNames = new ArrayList<>();
+    this.labelValues = new ArrayList<>();
+  }
+
+  /**
+   * Execute collectResponse
+   */
+  @Override
+  public Map<String, Collector.MetricFamilySamples> call() throws Exception {
+    return collectResponse(this.solrClient, this.conf);
+  }
+
+  /**
+   * Collect metrics from a Solr response by applying the configured jq queries.
+   */
+  public Map<String, Collector.MetricFamilySamples> collectResponse(SolrClient solrClient, LinkedHashMap conf) {
+    Map<String, Collector.MetricFamilySamples> metricFamilySamplesMap = new LinkedHashMap<>();
+
+    try {
+      // create Solr request parameters
+      LinkedHashMap confQuery = (LinkedHashMap) conf.get("query");
+      LinkedHashMap confParams = (LinkedHashMap) confQuery.get("params");
+      String path = (String) confQuery.get("path");
+      String core = (String) confQuery.get("core");
+      String collection = (String) confQuery.get("collection");
+      ArrayList<String> jsonQueries = (ArrayList<String>) conf.get("jsonQueries");
+
+      ModifiableSolrParams params = new ModifiableSolrParams();
+      if (confParams != null) {
+        for (Object k : confParams.keySet()) {
+          String name = (String) k;
+          String value = (String) confParams.get(k);
+          params.add(name, value);
+        }
+      }
+
+      // create Solr queryConfig request
+      QueryRequest queryRequest = new QueryRequest(params);
+      queryRequest.setPath(path);
+
+      // request to Solr
+      NamedList<Object> queryResponse = null;
+      try {
+        if (core == null && collection == null) {
+          queryResponse = solrClient.request(queryRequest);
+        } else if (core != null) {
+          queryResponse = solrClient.request(queryRequest, core);
+        } else if (collection != null) {
+          queryResponse = solrClient.request(queryRequest, collection);
+        }
+      } catch (SolrServerException | IOException e) {
+        this.logger.error("failed to request: " + queryRequest.getPath() + " " + e.getMessage());
+      }
+
+      ObjectMapper om = new ObjectMapper();
+
+      JsonNode metricsJson = om.readTree((String) queryResponse.get("response"));
+
+      List<JsonQuery> jqs = new ArrayList<>();
+      if (jsonQueries != null) {
+        for (String jsonQuery : jsonQueries) {
+          JsonQuery compiledJsonQuery = JsonQuery.compile(jsonQuery);
+          jqs.add(compiledJsonQuery);
+        }
+      }
+
+      for (int i = 0; i < jqs.size(); i++) {
+        JsonQuery q = jqs.get(i);
+        try {
+          List<JsonNode> results = q.apply(metricsJson);
+          for (JsonNode result : results) {
+            String type = result.get("type").textValue();
+            String name = result.get("name").textValue();
+            String help = result.get("help").textValue();
+            Double value = result.get("value").doubleValue();
+            ArrayList<String> labelNames = new ArrayList<>(this.labelNames);
+            ArrayList<String> labelValues = new ArrayList<>(this.labelValues);
+
+            if (solrClient instanceof CloudSolrClient) {
+              labelNames.add("zk_host");
+              labelValues.add(((CloudSolrClient) solrClient).getZkHost());
+            }
+
+            if (collection != null) {
+              labelNames.add("collection");
+              labelValues.add(collection);
+            }
+
+            if (solrClient instanceof HttpSolrClient) {
+              labelNames.add("base_url");
+              labelValues.add(((HttpSolrClient) solrClient).getBaseURL());
+            }
+
+            if (core != null) {
+              labelNames.add("core");
+              labelValues.add(core);
+            }
+
+            for(Iterator<JsonNode> ite = result.get("label_names").iterator();ite.hasNext();){
+              JsonNode item = ite.next();
+              labelNames.add(item.textValue());
+            }
+            for(Iterator<JsonNode> ite = result.get("label_values").iterator();ite.hasNext();){
+              JsonNode item = ite.next();
+              labelValues.add(item.textValue());
+            }
+
+            if (labelNames.indexOf("core") < 0 && labelNames.indexOf("collection") >= 0 && labelNames.indexOf("shard") >= 0 && labelNames.indexOf("replica") >= 0) {
+              StringBuffer sb = new StringBuffer();
+              sb.append(labelValues.get(labelNames.indexOf("collection")))
+                  .append("_")
+                  .append(labelValues.get(labelNames.indexOf("shard")))
+                  .append("_")
+                  .append(labelValues.get(labelNames.indexOf("replica")));
+
+              labelNames.add("core");
+              labelValues.add(sb.toString());
+            }
+
+            if (!metricFamilySamplesMap.containsKey(name)) {
+              Collector.MetricFamilySamples metricFamilySamples = new Collector.MetricFamilySamples(
+                name,
+                Collector.Type.valueOf(type),
+                help,
+                new ArrayList<>()
+              );
+              metricFamilySamplesMap.put(name, metricFamilySamples);
+            }
+
+            Collector.MetricFamilySamples.Sample sample = new Collector.MetricFamilySamples.Sample(name, labelNames, labelValues, value);
+
+            if (!metricFamilySamplesMap.get(name).samples.contains(sample)) {
+              metricFamilySamplesMap.get(name).samples.add(sample);
+            }
+          }
+        } catch (JsonQueryException e) {
+          this.logger.error(e.toString() + " " + q.toString());
+          SolrExporter.scrapeErrorTotal.inc();
+        }
+      }
+    } catch (HttpSolrClient.RemoteSolrException | IOException e) {
+      this.logger.error("failed to request: " + e.toString());
+    } catch (Exception e) {
+      this.logger.error(e.toString());
+      e.printStackTrace();
+    }
+
+    return metricFamilySamplesMap;
+  }
+}
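
For context, a sketch (not part of this commit; the core name and jq rule are illustrative assumptions) of the conf map shape that collectResponse() reads above: a "query" entry holding path/params/core/collection, plus a "jsonQueries" list of jq programs whose output objects carry the name, type, help, value, label_names and label_values fields consumed by the scraper.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;

import io.prometheus.client.Collector;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.impl.NoOpResponseParser;
import org.apache.solr.prometheus.scraper.SolrScraper;

public class SolrScraperConfSketch {
  public static void main(String[] args) throws Exception {
    // SolrScraper parses raw JSON, so the client uses NoOpResponseParser with the
    // json writer type, exactly as SolrCollector and SolrExporter do above.
    NoOpResponseParser responseParser = new NoOpResponseParser();
    responseParser.setWriterType("json");
    HttpSolrClient.Builder builder = new HttpSolrClient.Builder();
    builder.withBaseSolrUrl("http://localhost:8983/solr");
    HttpSolrClient solrClient = builder.build();
    solrClient.setParser(responseParser);

    // "query" describes the Solr request; "core" / "collection" are optional and
    // select which SolrClient.request() overload collectResponse() uses.
    LinkedHashMap<String, Object> query = new LinkedHashMap<>();
    query.put("path", "/admin/ping");
    query.put("core", "collection1");  // hypothetical core name

    // Each jq program must emit objects with the fields read by the scraper.
    ArrayList<String> jsonQueries = new ArrayList<>();
    jsonQueries.add(
        ". as $object | {"
        + " name: \"solr_ping\","
        + " type: \"GAUGE\","
        + " help: \"1 if the last ping succeeded.\","
        + " label_names: [],"
        + " label_values: [],"
        + " value: (if $object.status == \"OK\" then 1.0 else 0.0 end) }");

    LinkedHashMap<String, Object> conf = new LinkedHashMap<>();
    conf.put("query", query);
    conf.put("jsonQueries", jsonQueries);

    Map<String, Collector.MetricFamilySamples> samples = new SolrScraper(solrClient, conf).call();
    samples.values().forEach(s -> System.out.println(s.name + " " + s.samples));

    solrClient.close();
  }
}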

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6d66fc04/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/package-info.java
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/package-info.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/package-info.java
new file mode 100644
index 0000000..85c4eaa
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** 
+ * Solr metrics scraper.
+ */
+package org.apache.solr.prometheus.scraper;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6d66fc04/solr/contrib/prometheus-exporter/src/java/overview.html
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/src/java/overview.html b/solr/contrib/prometheus-exporter/src/java/overview.html
new file mode 100644
index 0000000..df1c45a
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/src/java/overview.html
@@ -0,0 +1,26 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<html>
+<body>
+Apache Solr Search Server: Solr Prometheus Exporter contrib
+
+<p>
+This package provides tools for monitoring Solr with Prometheus.
+</p>
+
+</body>
+</html>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6d66fc04/solr/contrib/prometheus-exporter/src/test-files/conf/log4j.properties
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/src/test-files/conf/log4j.properties b/solr/contrib/prometheus-exporter/src/test-files/conf/log4j.properties
new file mode 100644
index 0000000..5dd6899
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/src/test-files/conf/log4j.properties
@@ -0,0 +1,22 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+log4j.rootLogger=INFO, stdout
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd'T'HH:mm:ss.SSS} %-5p [%c] - %m%n