You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@plc4x.apache.org by jf...@apache.org on 2018/12/11 08:54:40 UTC
[incubator-plc4x] 01/01: [plc4j-scraper] Implementation of JMX.
This is an automated email from the ASF dual-hosted git repository.
jfeinauer pushed a commit to branch features/scraper-jmx-support
in repository https://gitbox.apache.org/repos/asf/incubator-plc4x.git
commit 9caa1b289d208033ba22664802deced1e55f17f2
Author: Julian Feinauer <j....@pragmaticminds.de>
AuthorDate: Sun Nov 25 17:43:50 2018 +0100
[plc4j-scraper] Implementation of JMX.
---
plc4j/utils/scraper/pom.xml | 6 ++++
.../org/apache/plc4x/java/scraper/Scraper.java | 35 +++++++++++++++++++++-
.../apache/plc4x/java/scraper/ScraperMBean.java} | 19 ++++--------
.../org/apache/plc4x/java/scraper/ScraperTask.java | 26 +++++++++++++++-
.../plc4x/java/scraper/ScraperTaskMBean.java} | 19 +++++-------
.../apache/plc4x/java/scraper/ScraperRunner.java | 19 ++++++++++--
6 files changed, 95 insertions(+), 29 deletions(-)
diff --git a/plc4j/utils/scraper/pom.xml b/plc4j/utils/scraper/pom.xml
index 8b9bf2b..ee65b41 100644
--- a/plc4j/utils/scraper/pom.xml
+++ b/plc4j/utils/scraper/pom.xml
@@ -70,6 +70,12 @@
<artifactId>plc4j-connection-pool</artifactId>
<version>0.3.0-SNAPSHOT</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.plc4x</groupId>
+ <artifactId>plc4j-driver-simulated</artifactId>
+ <version>0.3.0-SNAPSHOT</version>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>org.apache.commons</groupId>
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/Scraper.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/Scraper.java
index 725e58e..bc23b35 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/Scraper.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/Scraper.java
@@ -35,6 +35,8 @@ import org.apache.plc4x.java.utils.connectionpool.PooledPlcDriverManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import javax.management.*;
+import java.lang.management.ManagementFactory;
import java.util.List;
import java.util.Locale;
import java.util.Map;
@@ -43,9 +45,10 @@ import java.util.concurrent.*;
/**
* Main class that orchestrates scraping.
*/
-public class Scraper {
+public class Scraper implements ScraperMBean {
private static final Logger LOGGER = LoggerFactory.getLogger(Scraper.class);
+ public static final String MX_DOMAIN = "org.apache.plc4x.java";
private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(10,
new BasicThreadFactory.Builder()
@@ -66,6 +69,7 @@ public class Scraper {
private final MultiValuedMap<ScraperTask, ScheduledFuture<?>> futures = new ArrayListValuedHashMap<>();
private final PlcDriverManager driverManager;
private final List<ScrapeJob> jobs;
+ private MBeanServer mBeanServer;
/**
* Creates a Scraper instance from a configuration.
@@ -103,6 +107,13 @@ public class Scraper {
Validate.notEmpty(jobs);
this.driverManager = driverManager;
this.jobs = jobs;
+ // Register MBean
+ mBeanServer = ManagementFactory.getPlatformMBeanServer();
+ try {
+ mBeanServer.registerMBean(this, new ObjectName(MX_DOMAIN, "scraper", "scraper"));
+ } catch (InstanceAlreadyExistsException | MBeanRegistrationException | NotCompliantMBeanException | MalformedObjectNameException e) {
+ LOGGER.debug("Unable to register Scraper as MBean", e);
+ }
}
/**
@@ -124,6 +135,8 @@ public class Scraper {
tuple.getLeft().getFields(),
1_000,
handlerPool, resultHandler);
+ // Register the task as an MBean
+ registerTaskMBean(task);
// Add task to internal list
tasks.put(tuple.getLeft(), task);
ScheduledFuture<?> future = scheduler.scheduleAtFixedRate(task,
@@ -150,6 +163,18 @@ public class Scraper {
}
/**
+ * Register a task as MBean
+ * @param task task to register
+ */
+ private void registerTaskMBean(ScraperTask task) {
+ try {
+ mBeanServer.registerMBean(task, new ObjectName(MX_DOMAIN + ":type=ScrapeTask,name=" + task.getJobName() + "-" + task.getConnectionAlias()));
+ } catch (InstanceAlreadyExistsException | MBeanRegistrationException | NotCompliantMBeanException | MalformedObjectNameException e) {
+ LOGGER.debug("Unable to register Task as MBean", e);
+ }
+ }
+
+ /**
* For testing.
*/
ScheduledExecutorService getScheduler() {
@@ -171,4 +196,12 @@ public class Scraper {
futures.clear();
}
+ // MBean methods
+
+ @Override
+ public boolean isRunning() {
+ // TODO: is a non-empty futures map a sufficient indicator that the scraper is running?
+ return !futures.isEmpty();
+ }
+
}
diff --git a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperRunner.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScraperMBean.java
similarity index 58%
copy from plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperRunner.java
copy to plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScraperMBean.java
index 029a25a..edc2d5a 100644
--- a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperRunner.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScraperMBean.java
@@ -19,20 +19,13 @@
package org.apache.plc4x.java.scraper;
-import org.apache.plc4x.java.scraper.config.ScraperConfiguration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-
-public class ScraperRunner {
+/**
+ * MBean for {@link Scraper}
+ */
+public interface ScraperMBean {
- private static final Logger LOGGER = LoggerFactory.getLogger(ScraperRunner.class);
+ boolean isRunning();
- public static void main(String[] args) throws IOException {
- ScraperConfiguration configuration = ScraperConfiguration.fromFile("plc4j/utils/scraper/src/test/resources/example.yml");
- Scraper scraper = new Scraper(configuration, (j, a, m) -> LOGGER.info("Results from {}/{}: {}", j, a, m));
+ int getNumberOfActiveTasks();
- scraper.start();
- }
}
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScraperTask.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScraperTask.java
index f3240cf..ded9a87 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScraperTask.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScraperTask.java
@@ -32,6 +32,7 @@ import org.apache.plc4x.java.api.types.PlcResponseCode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.util.Collections;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
@@ -46,7 +47,7 @@ import java.util.stream.Collectors;
* One {@link ScrapeJob} gets split into multiple tasks.
* One task for each source that is defined in the {@link org.apache.plc4x.java.scraper.config.JobConfiguration}.
*/
-public class ScraperTask implements Runnable {
+public class ScraperTask implements Runnable, ScraperTaskMBean {
private static final Logger LOGGER = LoggerFactory.getLogger(ScraperTask.class);
@@ -194,4 +195,27 @@ public class ScraperTask implements Runnable {
LOGGER.warn("Handling error responses: {}", failed);
}
+
+ //---------------------------------
+ // JMX Monitoring
+ //---------------------------------
+ @Override
+ public long getScrapesTotal() {
+ return requestCounter.get();
+ }
+
+ @Override
+ public long getScrapesSuccess() {
+ return successCounter.get();
+ }
+
+ @Override
+ public String[] getPercentiles() {
+ String[] percentiles = new String[10];
+ for (int i = 1; i <= 10; i += 1) {
+ percentiles[i - 1] = String.format("%d%%: %s ms", 10 * i, latencyStatistics.getPercentile(10.0 * i) * 1e-6);
+ }
+ return percentiles;
+ }
+
}
diff --git a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperRunner.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScraperTaskMBean.java
similarity index 58%
copy from plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperRunner.java
copy to plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScraperTaskMBean.java
index 029a25a..08acc43 100644
--- a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperRunner.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScraperTaskMBean.java
@@ -19,20 +19,17 @@
package org.apache.plc4x.java.scraper;
-import org.apache.plc4x.java.scraper.config.ScraperConfiguration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+/**
+ * MBean for a scrape job.
+ */
+public interface ScraperTaskMBean {
-import java.io.IOException;
+ long getScrapesTotal();
-public class ScraperRunner {
+ long getScrapesSuccess();
- private static final Logger LOGGER = LoggerFactory.getLogger(ScraperRunner.class);
+ double getPercentageFailed();
- public static void main(String[] args) throws IOException {
- ScraperConfiguration configuration = ScraperConfiguration.fromFile("plc4j/utils/scraper/src/test/resources/example.yml");
- Scraper scraper = new Scraper(configuration, (j, a, m) -> LOGGER.info("Results from {}/{}: {}", j, a, m));
+ String[] getPercentiles();
- scraper.start();
- }
}
diff --git a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperRunner.java b/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperRunner.java
index 029a25a..39dbdf7 100644
--- a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperRunner.java
+++ b/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperRunner.java
@@ -20,6 +20,7 @@
package org.apache.plc4x.java.scraper;
import org.apache.plc4x.java.scraper.config.ScraperConfiguration;
+import org.apache.plc4x.java.scraper.config.ScraperConfigurationBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -29,9 +30,21 @@ public class ScraperRunner {
private static final Logger LOGGER = LoggerFactory.getLogger(ScraperRunner.class);
- public static void main(String[] args) throws IOException {
- ScraperConfiguration configuration = ScraperConfiguration.fromFile("plc4j/utils/scraper/src/test/resources/example.yml");
- Scraper scraper = new Scraper(configuration, (j, a, m) -> LOGGER.info("Results from {}/{}: {}", j, a, m));
+ public static void main(String[] args) {
+ ScraperConfigurationBuilder builder = new ScraperConfigurationBuilder();
+ ScraperConfiguration conf = builder
+ .addSource("source", "test:123")
+ .addSource("source2", "test:456")
+ .job("job1", 100)
+ .source("source")
+ .source("source2")
+ .field("field1", "RANDOM/test:INTEGER")
+ .build()
+ .build();
+
+ // Alternatively: load the configuration from a file.
+ // ScraperConfiguration configuration = ScraperConfiguration.fromFile("plc4j/utils/scraper/src/test/resources/example.yml");
+ Scraper scraper = new Scraper(conf, (j, a, m) -> LOGGER.info("Results from {}/{}: {}", j, a, m));
scraper.start();
}