Posted to commits@lucene.apache.org by ko...@apache.org on 2018/02/20 09:08:33 UTC

[4/7] lucene-solr:branch_7x: SOLR-11795: Add Solr metrics exporter for Prometheus

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5a01e02/solr/contrib/prometheus-exporter/conf/log4j.properties
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/conf/log4j.properties b/solr/contrib/prometheus-exporter/conf/log4j.properties
new file mode 100644
index 0000000..5dd6899
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/conf/log4j.properties
@@ -0,0 +1,22 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+log4j.rootLogger=INFO, stdout
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd'T'HH:mm:ss.SSS} %-5p [%c] - %m%n

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5a01e02/solr/contrib/prometheus-exporter/ivy.xml
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/ivy.xml b/solr/contrib/prometheus-exporter/ivy.xml
new file mode 100644
index 0000000..a6d0705
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/ivy.xml
@@ -0,0 +1,41 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one
+   or more contributor license agreements.  See the NOTICE file
+   distributed with this work for additional information
+   regarding copyright ownership.  The ASF licenses this file
+   to you under the Apache License, Version 2.0 (the
+   "License"); you may not use this file except in compliance
+   with the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing,
+   software distributed under the License is distributed on an
+   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+   KIND, either express or implied.  See the License for the
+   specific language governing permissions and limitations
+   under the License.    
+-->
+<ivy-module version="2.0">
+  <info organisation="org.apache.solr" module="prometheus"/>
+  <configurations defaultconfmapping="compile->master;test->master">
+    <conf name="compile" transitive="false"/>
+    <conf name="test" transitive="false"/>
+  </configurations>
+  <dependencies>
+    <dependency org="io.prometheus" name="simpleclient" rev="0.0.26" conf="compile"/>
+    <dependency org="io.prometheus" name="simpleclient_common" rev="0.0.26" conf="compile"/>
+    <dependency org="io.prometheus" name="simpleclient_httpserver" rev="0.0.26" conf="compile"/>
+    <dependency org="org.yaml" name="snakeyaml" rev="1.16" conf="compile"/>
+    <dependency org="com.fasterxml.jackson.core" name="jackson-core" rev="2.9.1" conf="compile"/>
+    <dependency org="com.fasterxml.jackson.core" name="jackson-databind" rev="2.9.1" conf="compile"/>
+    <dependency org="com.fasterxml.jackson.core" name="jackson-annotations" rev="2.9.1" conf="compile"/>
+    <dependency org="net.thisptr" name="jackson-jq" rev="0.0.8" conf="compile"/>
+    <dependency org="net.sourceforge.argparse4j" name="argparse4j" rev="0.7.0" conf="compile"/>
+    <dependency org="org.slf4j" name="slf4j-api" rev="1.7.25" conf="compile"/>
+    <dependency org="org.slf4j" name="slf4j-log4j12" rev="1.7.25" conf="compile"/>
+    <dependency org="log4j" name="log4j" rev="1.2.17" conf="compile"/>
+
+    <exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/>
+  </dependencies>
+</ivy-module>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5a01e02/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/SolrCollector.java
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/SolrCollector.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/SolrCollector.java
new file mode 100644
index 0000000..9341fa2
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/SolrCollector.java
@@ -0,0 +1,402 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.prometheus.collector;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.solr.prometheus.collector.config.SolrCollectorConfig;
+import org.apache.solr.prometheus.scraper.SolrScraper;
+import org.apache.solr.prometheus.scraper.config.SolrScraperConfig;
+import io.prometheus.client.Collector;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.impl.NoOpResponseParser;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.CoreAdminRequest;
+import org.apache.solr.common.params.CoreAdminParams;
+import org.apache.solr.common.util.NamedList;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * SolrCollector
+ */
+public class SolrCollector extends Collector implements Collector.Describable {
+  private static final Logger logger = LoggerFactory.getLogger(SolrCollector.class);
+
+  private SolrClient solrClient;
+  private SolrCollectorConfig config = new SolrCollectorConfig();
+  private int numThreads;
+
+  private static ObjectMapper om = new ObjectMapper();
+
+  /**
+   * Constructor.
+   */
+  public SolrCollector(SolrClient solrClient, SolrCollectorConfig config, int numThreads) {
+    this.solrClient = solrClient;
+    this.config = config;
+    this.numThreads = numThreads;
+  }
+
+  /**
+   * Describe scrape status.
+   */
+  public List<Collector.MetricFamilySamples> describe() {
+    List<MetricFamilySamples> metricFamilies = new ArrayList<>();
+    metricFamilies.add(new MetricFamilySamples("solr_exporter_duration_seconds", Type.GAUGE, "Time this Solr scrape took, in seconds.", new ArrayList<>()));
+    return metricFamilies;
+  }
+
+  /**
+   * Collect samples.
+   */
+  public List<MetricFamilySamples> collect() {
+    // start time of scraping.
+    long startTime = System.nanoTime();
+
+    Map<String, MetricFamilySamples> metricFamilySamplesMap = new LinkedHashMap<>();
+
+    ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
+    List<Future<Map<String, MetricFamilySamples>>> futureList = new ArrayList<>();
+
+    try {
+      // Ping
+      if (config.getPing() != null) {
+        if (solrClient instanceof CloudSolrClient) {
+          List<HttpSolrClient> httpSolrClients = new ArrayList<>();
+          try {
+            httpSolrClients = getHttpSolrClients((CloudSolrClient) solrClient);
+            for (HttpSolrClient httpSolrClient : httpSolrClients) {
+              try {
+                List<String> cores = getCores(httpSolrClient);
+                for (String core : cores) {
+                  SolrScraperConfig pingConfig;
+                  try {
+                    pingConfig = config.getPing().clone();
+                  } catch (CloneNotSupportedException e) {
+                    logger.error(e.getMessage());
+                    continue;
+                  }
+
+                  pingConfig.getQuery().setCore(core);
+
+                  SolrScraper scraper = new SolrScraper(httpSolrClient, pingConfig, Arrays.asList("zk_host"), Arrays.asList(((CloudSolrClient) solrClient).getZkHost()));
+                  Future<Map<String, MetricFamilySamples>> future = executorService.submit(scraper);
+                  futureList.add(future);
+                }
+              } catch (SolrServerException | IOException e) {
+                logger.error(e.getMessage());
+              }
+            }
+
+            // get future
+            for (Future<Map<String, MetricFamilySamples>> future : futureList) {
+              try {
+                Map<String, MetricFamilySamples> m = future.get(60, TimeUnit.SECONDS);
+                mergeMetrics(metricFamilySamplesMap, m);
+              } catch (InterruptedException | ExecutionException | TimeoutException e) {
+                logger.error(e.getMessage());
+              }
+            }
+          } catch (SolrServerException | IOException e) {
+            logger.error(e.getMessage());
+          } finally {
+            for (HttpSolrClient httpSolrClient : httpSolrClients) {
+              try {
+                httpSolrClient.close();
+              } catch (IOException e) {
+                logger.error(e.getMessage());
+              }
+            }
+          }
+
+          try {
+            List<String> collections = getCollections((CloudSolrClient) solrClient);
+            for (String collection : collections) {
+              SolrScraperConfig pingConfig;
+              try {
+                pingConfig = config.getPing().clone();
+              } catch (CloneNotSupportedException e) {
+                logger.error(e.getMessage());
+                continue;
+              }
+
+              pingConfig.getQuery().setCollection(collection);
+              LinkedHashMap<String, String> distrib = new LinkedHashMap<>();
+              distrib.put("distrib", "true");
+              pingConfig.getQuery().setParams(Collections.singletonList(distrib));
+
+              SolrScraper scraper = new SolrScraper(solrClient, pingConfig);
+              Future<Map<String, MetricFamilySamples>> future = executorService.submit(scraper);
+              futureList.add(future);
+            }
+          } catch (SolrServerException | IOException e) {
+            logger.error(e.getMessage());
+          }
+        } else {
+          try {
+            List<String> cores = getCores((HttpSolrClient) solrClient);
+            for (String core : cores) {
+              SolrScraperConfig pingConfig = new SolrScraperConfig();
+              pingConfig.setQuery(config.getPing().getQuery());
+              pingConfig.getQuery().setCore(core);
+
+              pingConfig.setJsonQueries(config.getPing().getJsonQueries());
+
+              SolrScraper scraper = new SolrScraper(solrClient, pingConfig);
+              Future<Map<String, MetricFamilySamples>> future = executorService.submit(scraper);
+              futureList.add(future);
+            }
+          } catch (SolrServerException | IOException e) {
+            logger.error(e.getMessage());
+          }
+        }
+      }
+
+      // Metrics
+      if (config.getMetrics() != null) {
+        if (solrClient instanceof CloudSolrClient) {
+          List<HttpSolrClient> httpSolrClients = new ArrayList<>();
+          try {
+            httpSolrClients = getHttpSolrClients((CloudSolrClient) solrClient);
+            for (HttpSolrClient httpSolrClient : httpSolrClients) {
+              SolrScraper scraper = new SolrScraper(httpSolrClient, config.getMetrics(), Arrays.asList("zk_host"), Arrays.asList(((CloudSolrClient) solrClient).getZkHost()));
+              Future<Map<String, MetricFamilySamples>> future = executorService.submit(scraper);
+              futureList.add(future);
+            }
+
+            // get future
+            for (Future<Map<String, MetricFamilySamples>> future : futureList) {
+              try {
+                Map<String, MetricFamilySamples> m = future.get(60, TimeUnit.SECONDS);
+                mergeMetrics(metricFamilySamplesMap, m);
+              } catch (InterruptedException | ExecutionException | TimeoutException e) {
+                logger.error(e.getMessage());
+              }
+            }
+          } catch (SolrServerException | IOException e) {
+            logger.error(e.getMessage());
+          } finally {
+            for (HttpSolrClient httpSolrClient : httpSolrClients) {
+              try {
+                httpSolrClient.close();
+              } catch (IOException e) {
+                logger.error(e.getMessage());
+              }
+            }
+          }
+        } else {
+          SolrScraper scraper = new SolrScraper(solrClient, config.getMetrics());
+          Future<Map<String, MetricFamilySamples>> future = executorService.submit(scraper);
+          futureList.add(future);
+        }
+      }
+
+      // Collections
+      if (config.getCollections() != null) {
+        if (solrClient instanceof CloudSolrClient) {
+          SolrScraper scraper = new SolrScraper(solrClient, config.getCollections());
+          Future<Map<String, MetricFamilySamples>> future = executorService.submit(scraper);
+          futureList.add(future);
+        }
+      }
+
+      // Query
+      if (config.getQueries() != null) {
+        for (SolrScraperConfig c : config.getQueries()) {
+          SolrScraper scraper = new SolrScraper(solrClient, c);
+          Future<Map<String, MetricFamilySamples>> future = executorService.submit(scraper);
+          futureList.add(future);
+        }
+      }
+
+      // get future
+      for (Future<Map<String, MetricFamilySamples>> future : futureList) {
+        try {
+          Map<String, MetricFamilySamples> m = future.get(60, TimeUnit.SECONDS);
+          mergeMetrics(metricFamilySamplesMap, m);
+        } catch (InterruptedException | ExecutionException | TimeoutException e) {
+          logger.error(e.getMessage());
+        }
+      }
+    } finally {
+      executorService.shutdown();
+    }
+
+    // return value
+    List<MetricFamilySamples> metricFamiliesSamplesList = new ArrayList<>();
+
+    // add solr metrics
+    for (String gaugeMetricName : metricFamilySamplesMap.keySet()) {
+      MetricFamilySamples metricFamilySamples = metricFamilySamplesMap.get(gaugeMetricName);
+      if (metricFamilySamples.samples.size() > 0) {
+        metricFamiliesSamplesList.add(metricFamilySamples);
+      }
+    }
+
+    // add scrape duration metric
+    List<MetricFamilySamples.Sample> durationSample = new ArrayList<>();
+    durationSample.add(new MetricFamilySamples.Sample("solr_exporter_duration_seconds", new ArrayList<>(), new ArrayList<>(), (System.nanoTime() - startTime) / 1.0E9));
+    metricFamiliesSamplesList.add(new MetricFamilySamples("solr_exporter_duration_seconds", Type.GAUGE, "Time this Solr scrape took, in seconds.", durationSample));
+
+    return metricFamiliesSamplesList;
+  }
+
+  /**
+   * Merge metrics.
+   */
+  private Map<String, MetricFamilySamples> mergeMetrics(Map<String, MetricFamilySamples> metrics1, Map<String, MetricFamilySamples> metrics2) {
+    // merge MetricFamilySamples
+    for (String k : metrics2.keySet()) {
+      if (metrics1.containsKey(k)) {
+        for (MetricFamilySamples.Sample sample : metrics2.get(k).samples) {
+          if (!metrics1.get(k).samples.contains(sample)) {
+            metrics1.get(k).samples.add(sample);
+          }
+        }
+      } else {
+        metrics1.put(k, metrics2.get(k));
+      }
+    }
+
+    return metrics1;
+  }
+
+
+  /**
+   * Get target cores via CoreAdminAPI.
+   */
+  public static List<String> getCores(HttpSolrClient httpSolrClient) throws SolrServerException, IOException {
+    List<String> cores = new ArrayList<>();
+
+    NoOpResponseParser responseParser = new NoOpResponseParser();
+    responseParser.setWriterType("json");
+
+    httpSolrClient.setParser(responseParser);
+
+    CoreAdminRequest coreAdminRequest = new CoreAdminRequest();
+    coreAdminRequest.setAction(CoreAdminParams.CoreAdminAction.STATUS);
+    coreAdminRequest.setIndexInfoNeeded(false);
+
+    NamedList<Object> coreAdminResponse = httpSolrClient.request(coreAdminRequest);
+
+    JsonNode statusJsonNode = om.readTree((String) coreAdminResponse.get("response")).get("status");
+
+    for (Iterator<JsonNode> i = statusJsonNode.iterator(); i.hasNext(); ) {
+      String core = i.next().get("name").textValue();
+      if (!cores.contains(core)) {
+        cores.add(core);
+      }
+    }
+
+    return cores;
+  }
+
+  /**
+   * Get target cores via CollectionsAPI.
+   */
+  public static List<String> getCollections(CloudSolrClient cloudSolrClient) throws SolrServerException, IOException {
+    List<String> collections = new ArrayList<>();
+
+    NoOpResponseParser responseParser = new NoOpResponseParser();
+    responseParser.setWriterType("json");
+
+    cloudSolrClient.setParser(responseParser);
+
+    CollectionAdminRequest collectionAdminRequest = new CollectionAdminRequest.List();
+
+    NamedList<Object> collectionAdminResponse = cloudSolrClient.request(collectionAdminRequest);
+
+    JsonNode collectionsJsonNode = om.readTree((String) collectionAdminResponse.get("response")).get("collections");
+
+    for (Iterator<JsonNode> i = collectionsJsonNode.iterator(); i.hasNext(); ) {
+      String collection = i.next().textValue();
+      if (!collections.contains(collection)) {
+        collections.add(collection);
+      }
+    }
+
+    return collections;
+  }
+
+  /**
+   * Get base urls via CollectionsAPI.
+   */
+  private List<String> getBaseUrls(CloudSolrClient cloudSolrClient) throws SolrServerException, IOException {
+    List<String> baseUrls = new ArrayList<>();
+
+    NoOpResponseParser responseParser = new NoOpResponseParser();
+    responseParser.setWriterType("json");
+
+    cloudSolrClient.setParser(responseParser);
+
+    CollectionAdminRequest collectionAdminRequest = new CollectionAdminRequest.ClusterStatus();
+
+    NamedList<Object> collectionAdminResponse = cloudSolrClient.request(collectionAdminRequest);
+
+    List<JsonNode> baseUrlJsonNode = om.readTree((String) collectionAdminResponse.get("response")).findValues("base_url");
+
+    for (Iterator<JsonNode> i = baseUrlJsonNode.iterator(); i.hasNext(); ) {
+      String baseUrl = i.next().textValue();
+      if (!baseUrls.contains(baseUrl)) {
+        baseUrls.add(baseUrl);
+      }
+    }
+
+    return baseUrls;
+  }
+
+  /**
+   * Get HTTP Solr Clients
+   */
+  private List<HttpSolrClient> getHttpSolrClients(CloudSolrClient cloudSolrClient) throws SolrServerException, IOException {
+    List<HttpSolrClient> solrClients = new ArrayList<>();
+
+    for (String baseUrl : getBaseUrls(cloudSolrClient)) {
+      NoOpResponseParser responseParser = new NoOpResponseParser();
+      responseParser.setWriterType("json");
+
+      HttpSolrClient.Builder builder = new HttpSolrClient.Builder();
+      builder.withBaseSolrUrl(baseUrl);
+
+      HttpSolrClient httpSolrClient = builder.build();
+      httpSolrClient.setParser(responseParser);
+
+      solrClients.add(httpSolrClient);
+    }
+
+    return solrClients;
+  }
+}
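
The class above is a plain Prometheus simpleclient Collector, so it can be registered with a CollectorRegistry and scraped directly. The following is a minimal, illustrative sketch (not part of this patch); the Solr base URL is an assumption and the empty SolrCollectorConfig is only a placeholder for a populated configuration:

  import io.prometheus.client.Collector;
  import io.prometheus.client.CollectorRegistry;
  import org.apache.solr.client.solrj.impl.HttpSolrClient;
  import org.apache.solr.prometheus.collector.SolrCollector;
  import org.apache.solr.prometheus.collector.config.SolrCollectorConfig;

  import java.util.Enumeration;

  public class SolrCollectorExample {
    public static void main(String[] args) {
      // Assumption: a standalone Solr node is reachable at this base URL.
      HttpSolrClient solrClient = new HttpSolrClient.Builder()
          .withBaseSolrUrl("http://localhost:8983/solr")
          .build();

      // Placeholder config; a real deployment loads config.yml into this bean instead.
      SolrCollectorConfig config = new SolrCollectorConfig();

      CollectorRegistry registry = new CollectorRegistry();
      registry.register(new SolrCollector(solrClient, config, 2));

      // Each call to metricFamilySamples() triggers SolrCollector.collect().
      Enumeration<Collector.MetricFamilySamples> samples = registry.metricFamilySamples();
      while (samples.hasMoreElements()) {
        System.out.println(samples.nextElement().name);
      }
    }
  }

In the patch itself this wiring is done by SolrExporter, further down in this commit.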

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5a01e02/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/config/SolrCollectorConfig.java
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/config/SolrCollectorConfig.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/config/SolrCollectorConfig.java
new file mode 100644
index 0000000..8344802
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/config/SolrCollectorConfig.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.prometheus.collector.config;
+
+import org.apache.solr.prometheus.scraper.config.SolrScraperConfig;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * SolrCollectorConfig
+ */
+public class SolrCollectorConfig {
+  private SolrScraperConfig ping = new SolrScraperConfig();
+  private SolrScraperConfig metrics = new SolrScraperConfig();
+  private SolrScraperConfig collections = new SolrScraperConfig();
+  private List<SolrScraperConfig> queries = new ArrayList<>();
+
+  public SolrScraperConfig getPing() {
+    return ping;
+  }
+
+  public void setPing(SolrScraperConfig ping) {
+    this.ping = ping;
+  }
+
+  public SolrScraperConfig getMetrics() {
+    return metrics;
+  }
+
+  public void setMetrics(SolrScraperConfig metrics) {
+    this.metrics = metrics;
+  }
+
+  public SolrScraperConfig getCollections() {
+    return collections;
+  }
+
+  public void setCollections(SolrScraperConfig collections) {
+    this.collections = collections;
+  }
+
+  public List<SolrScraperConfig> getQueries() {
+    return queries;
+  }
+
+  public void setQueries(List<SolrScraperConfig> queries) {
+    this.queries = queries;
+  }
+}
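
SolrExporter (below) builds this bean from a YAML file via SnakeYAML's loadAs(), so the property names above double as the configuration keys. A small, purely illustrative mapping (the YAML here is an assumption, not taken from the exporter's bundled configuration):

  import org.apache.solr.prometheus.collector.config.SolrCollectorConfig;
  import org.yaml.snakeyaml.Yaml;

  public class SolrCollectorConfigExample {
    public static void main(String[] args) {
      // Illustrative YAML only; keys mirror the bean properties: ping, metrics, collections, queries.
      String yaml =
          "metrics:\n" +
          "  query:\n" +
          "    path: /admin/metrics\n" +
          "  jsonQueries:\n" +
          "    - '{name: \"solr_up\", type: \"GAUGE\", help: \"Illustrative.\", label_names: [], label_values: [], value: 1}'\n";

      SolrCollectorConfig config = new Yaml().loadAs(yaml, SolrCollectorConfig.class);
      System.out.println(config.getMetrics().getQuery().getPath());   // prints /admin/metrics
    }
  }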

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5a01e02/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java
new file mode 100644
index 0000000..a51207d
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java
@@ -0,0 +1,254 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.prometheus.exporter;
+
+import org.apache.solr.prometheus.collector.SolrCollector;
+import org.apache.solr.prometheus.collector.config.SolrCollectorConfig;
+import io.prometheus.client.CollectorRegistry;
+import io.prometheus.client.Counter;
+import io.prometheus.client.exporter.HTTPServer;
+import net.sourceforge.argparse4j.ArgumentParsers;
+import net.sourceforge.argparse4j.impl.Arguments;
+import net.sourceforge.argparse4j.inf.ArgumentParser;
+import net.sourceforge.argparse4j.inf.ArgumentParserException;
+import net.sourceforge.argparse4j.inf.Namespace;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.impl.NoOpResponseParser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.yaml.snakeyaml.Yaml;
+
+import javax.management.MalformedObjectNameException;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Properties;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * SolrExporter
+ */
+public class SolrExporter {
+  private static final Logger logger = LoggerFactory.getLogger(SolrExporter.class);
+
+  private static final String[] ARG_PORT_FLAGS = { "-p", "--port" };
+  private static final String ARG_PORT_METAVAR = "PORT";
+  private static final String ARG_PORT_DEST = "port";
+  private static final Integer ARG_PORT_DEFAULT = 9983;
+  private static final String ARG_PORT_HELP = "solr-exporter listen port";
+
+  private static final String[] ARG_BASE_URL_FLAGS = { "-b", "--baseurl" };
+  private static final String ARG_BASE_URL_METAVAR = "BASE_URL";
+  private static final String ARG_BASE_URL_DEST = "baseUrl";
+  private static final String ARG_BASE_URL_DEFAULT = "";
+  private static final String ARG_BASE_URL_HELP = "specify Solr base URL when connecting to Solr in standalone mode (for example 'http://localhost:8983/solr')";
+
+  private static final String[] ARG_ZK_HOST_FLAGS = { "-z", "--zkhost" };
+  private static final String ARG_ZK_HOST_METAVAR = "ZK_HOST";
+  private static final String ARG_ZK_HOST_DEST = "zkHost";
+  private static final String ARG_ZK_HOST_DEFAULT = "";
+  private static final String ARG_ZK_HOST_HELP = "specify ZooKeeper connection string when connecting to Solr in SolrCloud mode (for example 'localhost:2181/solr')";
+
+  private static final String[] ARG_CONFIG_FLAGS = { "-f", "--config-file" };
+  private static final String ARG_CONFIG_METAVAR = "CONFIG";
+  private static final String ARG_CONFIG_DEST = "configFile";
+  private static final String ARG_CONFIG_DEFAULT = "./conf/config.yml";
+  private static final String ARG_CONFIG_HELP = "specify configuration file";
+
+  private static final String[] ARG_NUM_THREADS_FLAGS = { "-n", "--num-thread" };
+  private static final String ARG_NUM_THREADS_METAVAR = "NUM_THREADS";
+  private static final String ARG_NUM_THREADS_DEST = "numThreads";
+  private static final Integer ARG_NUM_THREADS_DEFAULT = 1;
+  private static final String ARG_NUM_THREADS_HELP = "specify number of threads";
+
+  private int port;
+  private SolrClient solrClient;
+  private SolrCollectorConfig config;
+  private int numThreads;
+
+  CollectorRegistry registry = new CollectorRegistry();
+
+  private HTTPServer httpServer;
+  private SolrCollector collector;
+
+  public static final Counter scrapeErrorTotal = Counter.build()
+      .name("solr_exporter_scrape_error_total")
+      .help("Number of scrape error.").register();
+
+  /**
+   * Constructor.
+   */
+  public SolrExporter(int port, SolrClient solrClient, File configFile, int numThreads) throws IOException {
+    this(port, solrClient, new Yaml().loadAs(new FileReader(configFile), SolrCollectorConfig.class), numThreads);
+  }
+
+  /**
+   * Constructor.
+   */
+  public SolrExporter(int port, SolrClient solrClient, SolrCollectorConfig config, int numThreads) {
+    super();
+
+    this.port = port;
+    this.solrClient = solrClient;
+    this.config = config;
+    this.numThreads = numThreads;
+  }
+
+  /**
+   * Start HTTP server for exporting Solr metrics.
+   */
+  public void start() throws MalformedObjectNameException, IOException {
+    InetSocketAddress socket = new InetSocketAddress(port);
+
+    this.collector = new SolrCollector(solrClient, config, numThreads);
+
+    this.registry.register(this.collector);
+    this.registry.register(scrapeErrorTotal);
+
+    this.httpServer = new HTTPServer(socket, this.registry);
+  }
+
+  /**
+   * Stop HTTP server for exporting Solr metrics.
+   */
+  public void stop() throws IOException {
+    this.httpServer.stop();
+    this.registry.unregister(this.collector);
+  }
+
+  /**
+   * Create Solr client
+   */
+  private static SolrClient createClient(String connStr) {
+    SolrClient solrClient;
+
+    Pattern baseUrlPattern = Pattern.compile("^https?:\\/\\/[\\w\\/:%#\\$&\\?\\(\\)~\\.=\\+\\-]+$");
+    Pattern zkHostPattern = Pattern.compile("^(?<host>[^\\/]+)(?<chroot>|(?:\\/.*))$");
+    Matcher matcher;
+
+    matcher = baseUrlPattern.matcher(connStr);
+    if (matcher.matches()) {
+      NoOpResponseParser responseParser = new NoOpResponseParser();
+      responseParser.setWriterType("json");
+
+      HttpSolrClient.Builder builder = new HttpSolrClient.Builder();
+      builder.withBaseSolrUrl(connStr);
+
+      HttpSolrClient httpSolrClient = builder.build();
+      httpSolrClient.setParser(responseParser);
+
+      solrClient = httpSolrClient;
+    } else {
+      String host = "";
+      String chroot = "";
+
+      matcher = zkHostPattern.matcher(connStr);
+      if (matcher.matches()) {
+        host = matcher.group("host") != null ? matcher.group("host") : "";
+        chroot = matcher.group("chroot") != null ? matcher.group("chroot") : "";
+      }
+
+      NoOpResponseParser responseParser = new NoOpResponseParser();
+      responseParser.setWriterType("json");
+
+      CloudSolrClient.Builder builder = new CloudSolrClient.Builder();
+      if (host.contains(",")) {
+        List<String> hosts = new ArrayList<>();
+        for (String h : host.split(",")) {
+          if (h != null && !h.equals("")) {
+            hosts.add(h.trim());
+          }
+        }
+        builder.withZkHost(hosts);
+      } else {
+        builder.withZkHost(host);
+      }
+      if (chroot.equals("")) {
+        builder.withZkChroot("/");
+      } else {
+        builder.withZkChroot(chroot);
+      }
+
+      CloudSolrClient cloudSolrClient = builder.build();
+      cloudSolrClient.setParser(responseParser);
+
+      solrClient = cloudSolrClient;
+    }
+
+    return solrClient;
+  }
+
+  /**
+   * Entry point of SolrExporter.
+   */
+  public static void main( String[] args ) {
+    ArgumentParser parser = ArgumentParsers.newArgumentParser(SolrCollector.class.getSimpleName())
+        .description("Prometheus exporter for Apache Solr.");
+
+    parser.addArgument(ARG_PORT_FLAGS)
+        .metavar(ARG_PORT_METAVAR).dest(ARG_PORT_DEST).type(Integer.class)
+        .setDefault(ARG_PORT_DEFAULT).help(ARG_PORT_HELP);
+
+    parser.addArgument(ARG_BASE_URL_FLAGS)
+        .metavar(ARG_BASE_URL_METAVAR).dest(ARG_BASE_URL_DEST).type(String.class)
+        .setDefault(ARG_BASE_URL_DEFAULT).help(ARG_BASE_URL_HELP);
+
+    parser.addArgument(ARG_ZK_HOST_FLAGS)
+        .metavar(ARG_ZK_HOST_METAVAR).dest(ARG_ZK_HOST_DEST).type(String.class)
+        .setDefault(ARG_ZK_HOST_DEFAULT).help(ARG_ZK_HOST_HELP);
+
+    parser.addArgument(ARG_CONFIG_FLAGS)
+        .metavar(ARG_CONFIG_METAVAR).dest(ARG_CONFIG_DEST).type(String.class)
+        .setDefault(ARG_CONFIG_DEFAULT).help(ARG_CONFIG_HELP);
+
+    parser.addArgument(ARG_NUM_THREADS_FLAGS)
+        .metavar(ARG_NUM_THREADS_METAVAR).dest(ARG_NUM_THREADS_DEST).type(Integer.class)
+        .setDefault(ARG_NUM_THREADS_DEFAULT).help(ARG_NUM_THREADS_HELP);
+
+    try {
+      Namespace res = parser.parseArgs(args);
+
+      int port = res.getInt(ARG_PORT_DEST);
+
+      String connStr = "http://localhost:8983/solr";
+      if (!res.getString(ARG_BASE_URL_DEST).equals("")) {
+        connStr = res.getString(ARG_BASE_URL_DEST);
+      } else if (!res.getString(ARG_ZK_HOST_DEST).equals("")) {
+        connStr = res.getString(ARG_ZK_HOST_DEST);
+      }
+
+      File configFile = new File(res.getString(ARG_CONFIG_DEST));
+      int numThreads = res.getInt(ARG_NUM_THREADS_DEST);
+
+      SolrClient solrClient = createClient(connStr);
+
+      SolrExporter solrExporter = new SolrExporter(port, solrClient, configFile, numThreads);
+      solrExporter.start();
+      logger.info("Start server");
+    } catch (MalformedObjectNameException | IOException e) {
+      logger.error("Start server failed: " + e.toString());
+    } catch (ArgumentParserException e) {
+      parser.handleError(e);
+    }
+  }
+}
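
As a usage illustration (not part of the patch), the exporter can also be started programmatically; this mirrors what main() does for "-p 9983 -b http://localhost:8983/solr -f ./conf/config.yml -n 1", and the Solr URL and config path are assumptions:

  import org.apache.solr.client.solrj.impl.HttpSolrClient;
  import org.apache.solr.client.solrj.impl.NoOpResponseParser;
  import org.apache.solr.prometheus.exporter.SolrExporter;

  import java.io.File;

  public class SolrExporterExample {
    public static void main(String[] args) throws Exception {
      // The exporter parses raw JSON responses, so the client needs a NoOpResponseParser.
      NoOpResponseParser responseParser = new NoOpResponseParser();
      responseParser.setWriterType("json");

      // Assumption: a standalone Solr node on localhost:8983.
      HttpSolrClient solrClient = new HttpSolrClient.Builder()
          .withBaseSolrUrl("http://localhost:8983/solr")
          .build();
      solrClient.setParser(responseParser);

      // Assumption: a config.yml exists at this path. Metrics are then served on port 9983.
      SolrExporter exporter = new SolrExporter(9983, solrClient, new File("./conf/config.yml"), 1);
      exporter.start();
    }
  }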

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5a01e02/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/SolrScraper.java
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/SolrScraper.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/SolrScraper.java
new file mode 100644
index 0000000..56929fc
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/SolrScraper.java
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.prometheus.scraper;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.solr.prometheus.exporter.SolrExporter;
+import org.apache.solr.prometheus.scraper.config.SolrQueryConfig;
+import org.apache.solr.prometheus.scraper.config.SolrScraperConfig;
+import io.prometheus.client.Collector;
+import net.thisptr.jackson.jq.JsonQuery;
+import net.thisptr.jackson.jq.exception.JsonQueryException;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.QueryRequest;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+
+/**
+ * SolrScraper
+ */
+public class SolrScraper implements Callable<Map<String, Collector.MetricFamilySamples>> {
+  private static final Logger logger = LoggerFactory.getLogger(SolrScraper.class);
+
+  private SolrClient solrClient;
+  private SolrScraperConfig scraperConfig;
+
+  private List<String> labelNames;
+  private List<String> labelValues;
+
+  /**
+   * Constructor.
+   */
+  public SolrScraper(SolrClient solrClient, SolrScraperConfig scraperConfig) {
+    this(solrClient, scraperConfig, new ArrayList<>(), new ArrayList<>());
+  }
+
+  /**
+   * Constructor.
+   */
+  public SolrScraper(SolrClient solrClient, SolrScraperConfig scraperConfig, List<String> labelNames, List<String> labelValues) {
+    super();
+
+    this.solrClient = solrClient;
+    this.scraperConfig = scraperConfig;
+
+    this.labelNames = labelNames;
+    this.labelValues = labelValues;
+  }
+
+  /**
+   * Execute collectResponse
+   */
+  @Override
+  public Map<String, Collector.MetricFamilySamples> call() throws Exception {
+    return collectResponse(this.solrClient, this.scraperConfig);
+  }
+
+  /**
+   * Collect metrics from a Solr response.
+   */
+  public Map<String, Collector.MetricFamilySamples> collectResponse(SolrClient solrClient, SolrScraperConfig scraperConfig) {
+    Map<String, Collector.MetricFamilySamples> metricFamilySamplesMap = new LinkedHashMap<>();
+
+    try {
+      SolrQueryConfig queryConfig = scraperConfig.getQuery();
+
+      // create Solr request parameters
+      ModifiableSolrParams params = new ModifiableSolrParams();
+      for (Map<String, String> param : queryConfig.getParams()) {
+        for (String name : param.keySet()) {
+          Object obj = param.get(name);
+          if (obj instanceof Number) {
+            params.add(name, obj.toString());
+          } else {
+            params.add(name, param.get(name));
+          }
+        }
+      }
+
+      // create Solr queryConfig request
+      QueryRequest queryRequest = new QueryRequest(params);
+      queryRequest.setPath(queryConfig.getPath());
+
+      // invoke Solr
+      NamedList<Object> queryResponse = null;
+      if (queryConfig.getCore().equals("") && queryConfig.getCollection().equals("")) {
+        queryResponse = solrClient.request(queryRequest);
+      } else if (!queryConfig.getCore().equals("")) {
+        queryResponse = solrClient.request(queryRequest, queryConfig.getCore());
+      } else if (!queryConfig.getCollection().equals("")) {
+        queryResponse = solrClient.request(queryRequest, queryConfig.getCollection());
+      }
+
+      ObjectMapper om = new ObjectMapper();
+
+      JsonNode metricsJson = om.readTree((String) queryResponse.get("response"));
+
+      List<JsonQuery> jqs = new ArrayList<>();
+      for (String jsonQuery : scraperConfig.getJsonQueries()) {
+        JsonQuery compiledJsonQuery = JsonQuery.compile(jsonQuery);
+        jqs.add(compiledJsonQuery);
+      }
+
+      for (int i = 0; i < jqs.size(); i++) {
+        JsonQuery q = jqs.get(i);
+        try {
+          List<JsonNode> results = q.apply(metricsJson);
+          for (JsonNode result : results) {
+            String type = result.get("type").textValue();
+            String name = result.get("name").textValue();
+            String help = result.get("help").textValue();
+            Double value = result.get("value").doubleValue();
+            ArrayList<String> labelNames = new ArrayList<>(this.labelNames);
+            ArrayList<String> labelValues = new ArrayList<>(this.labelValues);
+
+            if (solrClient instanceof CloudSolrClient) {
+              labelNames.add("zk_host");
+              labelValues.add(((CloudSolrClient) solrClient).getZkHost());
+            }
+
+            if (!scraperConfig.getQuery().getCollection().equals("")) {
+              labelNames.add("collection");
+              labelValues.add(scraperConfig.getQuery().getCollection());
+            }
+
+            if (solrClient instanceof HttpSolrClient) {
+              labelNames.add("base_url");
+              labelValues.add(((HttpSolrClient) solrClient).getBaseURL());
+            }
+
+            if (!scraperConfig.getQuery().getCore().equals("")) {
+              labelNames.add("core");
+              labelValues.add(scraperConfig.getQuery().getCore());
+            }
+
+            for(Iterator<JsonNode> ite = result.get("label_names").iterator();ite.hasNext();){
+              JsonNode item = ite.next();
+              labelNames.add(item.textValue());
+            }
+            for(Iterator<JsonNode> ite = result.get("label_values").iterator();ite.hasNext();){
+              JsonNode item = ite.next();
+              labelValues.add(item.textValue());
+            }
+
+            if (labelNames.indexOf("core") < 0 && labelNames.indexOf("collection") >= 0 && labelNames.indexOf("shard") >= 0 && labelNames.indexOf("replica") >= 0) {
+              if (labelValues.get(labelNames.indexOf("collection")).equals("-") && labelValues.get(labelNames.indexOf("shard")).equals("-") && labelValues.get(labelNames.indexOf("replica")).equals("-")) {
+                labelNames.add("core");
+                labelValues.add("-");
+              } else {
+                StringBuffer sb = new StringBuffer();
+                sb.append(labelValues.get(labelNames.indexOf("collection")))
+                    .append("_")
+                    .append(labelValues.get(labelNames.indexOf("shard")))
+                    .append("_")
+                    .append(labelValues.get(labelNames.indexOf("replica")));
+
+                labelNames.add("core");
+                labelValues.add(sb.toString());
+              }
+            }
+
+            if (!metricFamilySamplesMap.containsKey(name)) {
+              Collector.MetricFamilySamples metricFamilySamples = new Collector.MetricFamilySamples(
+                name,
+                Collector.Type.valueOf(type),
+                help,
+                new ArrayList<>()
+              );
+              metricFamilySamplesMap.put(name, metricFamilySamples);
+            }
+
+            Collector.MetricFamilySamples.Sample sample = new Collector.MetricFamilySamples.Sample(name, labelNames, labelValues, value);
+
+            if (!metricFamilySamplesMap.get(name).samples.contains(sample)) {
+              metricFamilySamplesMap.get(name).samples.add(sample);
+            }
+          }
+        } catch (JsonQueryException e) {
+          logger.error(e.toString() + " " + q.toString());
+          SolrExporter.scrapeErrorTotal.inc();
+        }
+      }
+    } catch (HttpSolrClient.RemoteSolrException | SolrServerException | IOException e) {
+      logger.error(e.toString());
+    } catch (Exception e) {
+      logger.error(e.toString());
+    }
+
+    return metricFamilySamplesMap;
+  }
+}
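
A minimal sketch (not from the patch) of driving the scraper on its own. The client must return raw JSON (hence the NoOpResponseParser), and every jq expression must emit objects with name, type, help, label_names, label_values and value fields, which is what collectResponse() reads above; the constant jq expression below is purely illustrative, while real configurations extract these values from the queried handler's response:

  import io.prometheus.client.Collector;
  import org.apache.solr.client.solrj.impl.HttpSolrClient;
  import org.apache.solr.client.solrj.impl.NoOpResponseParser;
  import org.apache.solr.prometheus.scraper.SolrScraper;
  import org.apache.solr.prometheus.scraper.config.SolrScraperConfig;

  import java.util.Arrays;
  import java.util.Map;

  public class SolrScraperExample {
    public static void main(String[] args) throws Exception {
      NoOpResponseParser responseParser = new NoOpResponseParser();
      responseParser.setWriterType("json");

      // Assumption: a standalone Solr node on localhost:8983.
      HttpSolrClient solrClient = new HttpSolrClient.Builder()
          .withBaseSolrUrl("http://localhost:8983/solr")
          .build();
      solrClient.setParser(responseParser);

      SolrScraperConfig scraperConfig = new SolrScraperConfig();
      scraperConfig.getQuery().setPath("/admin/metrics");
      // Constant jq expression showing the expected output shape; real queries navigate the JSON.
      scraperConfig.setJsonQueries(Arrays.asList(
          "{name: \"solr_up\", type: \"GAUGE\", help: \"Illustrative constant metric.\"," +
          " label_names: [], label_values: [], value: 1}"));

      Map<String, Collector.MetricFamilySamples> samples =
          new SolrScraper(solrClient, scraperConfig).call();
      samples.values().forEach(m -> System.out.println(m.name + " " + m.samples));
    }
  }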

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5a01e02/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/config/SolrQueryConfig.java
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/config/SolrQueryConfig.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/config/SolrQueryConfig.java
new file mode 100644
index 0000000..500bb03
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/config/SolrQueryConfig.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.prometheus.scraper.config;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+
+/**
+ * SolrQueryConfig
+ */
+public class SolrQueryConfig implements Cloneable {
+  private String core = "";
+  private String collection = "";
+  private String path = "";
+  private List<LinkedHashMap<String, String>> params = new ArrayList<>();
+
+  public String getCore() {
+    return core;
+  }
+
+  public void setCore(String core) {
+    this.core = core;
+  }
+
+  public String getCollection() {
+    return collection;
+  }
+
+  public void setCollection(String collection) {
+    this.collection = collection;
+  }
+
+  public String getPath() {
+    return path;
+  }
+
+  public void setPath(String path) {
+    this.path = path;
+  }
+
+  public List<LinkedHashMap<String, String>> getParams() {
+    return params;
+  }
+
+  public void setParams(List<LinkedHashMap<String, String>> params) {
+    this.params = params;
+  }
+
+  public String getParamsString() {
+    StringBuffer buffer = new StringBuffer();
+
+    for(Iterator<LinkedHashMap<String, String>> i = getParams().iterator(); i.hasNext(); ) {
+      LinkedHashMap<String, String> param = i.next();
+      for(Iterator<String> j = param.keySet().iterator(); j.hasNext(); ) {
+        String name = j.next();
+        buffer.append(name).append("=").append(param.get(name));
+        if (j.hasNext()) {
+          buffer.append("&");
+        }
+      }
+      if (i.hasNext()) {
+        buffer.append("&");
+      }
+    }
+
+    return buffer.toString();
+  }
+
+  public SolrQueryConfig clone() throws CloneNotSupportedException {
+    SolrQueryConfig queryConfig = null;
+
+    try {
+      queryConfig = (SolrQueryConfig) super.clone();
+      queryConfig.setCore(new String(this.core));
+      queryConfig.setCollection(new String(this.collection));
+      queryConfig.setParams(new ArrayList<>(this.params));
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+
+    return queryConfig;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5a01e02/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/config/SolrScraperConfig.java
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/config/SolrScraperConfig.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/config/SolrScraperConfig.java
new file mode 100644
index 0000000..3e3a36f
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/config/SolrScraperConfig.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.prometheus.scraper.config;
+
+import net.thisptr.jackson.jq.exception.JsonQueryException;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * SolrScraperConfig
+ */
+public class SolrScraperConfig implements Cloneable {
+  private SolrQueryConfig query = new SolrQueryConfig();
+  private List<String> jsonQueries = new ArrayList<>();
+
+  public SolrQueryConfig getQuery() {
+    return this.query;
+  }
+
+  public void setQuery(SolrQueryConfig query) {
+    this.query = query;
+  }
+
+  public List<String> getJsonQueries() {
+    return jsonQueries;
+  }
+
+  public void setJsonQueries(List<String> jsonQueries) throws JsonQueryException {
+    this.jsonQueries = jsonQueries;
+  }
+
+  public SolrScraperConfig clone() throws CloneNotSupportedException {
+    SolrScraperConfig scraperConfig = null;
+
+    try {
+      scraperConfig = (SolrScraperConfig) super.clone();
+      scraperConfig.setQuery(this.query.clone());
+      scraperConfig.setJsonQueries(new ArrayList<>(this.jsonQueries));
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+
+    return scraperConfig;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5a01e02/solr/contrib/prometheus-exporter/src/java/overview.html
----------------------------------------------------------------------
diff --git a/solr/contrib/prometheus-exporter/src/java/overview.html b/solr/contrib/prometheus-exporter/src/java/overview.html
new file mode 100644
index 0000000..6c7dfce
--- /dev/null
+++ b/solr/contrib/prometheus-exporter/src/java/overview.html
@@ -0,0 +1,21 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<html>
+<body>
+Apache Solr Search Server: Solr Prometheus Exporter contrib
+</body>
+</html>