Posted to commits@plc4x.apache.org by tm...@apache.org on 2019/08/11 18:54:41 UTC

[plc4x] 02/03: some refactoring

This is an automated email from the ASF dual-hosted git repository.

tmitsch pushed a commit to branch feature/improve-scraper-tim
in repository https://gitbox.apache.org/repos/asf/plc4x.git

commit 1c1802b2a06ccbd2db96171cc2bb98208616125c
Author: Tim Mitsch <t....@pragmaticindustries.de>
AuthorDate: Sun Aug 11 19:37:16 2019 +0200

    some refactoring
---
 .../java/org/apache/plc4x/Plc4XBaseTableTest.java  |   4 +-
 .../org/apache/plc4x/java/scraper/ScrapeJob.java   |   7 +-
 .../apache/plc4x/java/scraper/ScrapeJobImpl.java   |   7 +-
 .../apache/plc4x/java/scraper/ScraperTaskImpl.java |  47 ++++++---
 .../java/scraper/config/JobConfiguration.java      |   7 +-
 .../config/JobConfigurationClassicImpl.java        |  49 +++++++++
 ...ava => JobConfigurationClassicImplBuilder.java} |  20 ++--
 .../java/scraper/config/JobConfigurationImpl.java  |  35 ++++---
 .../java/scraper/config/ScraperConfiguration.java  |  97 ++----------------
 .../config/ScraperConfigurationClassicImpl.java    |  86 ++++++++++++++++
 ...=> ScraperConfigurationClassicImplBuilder.java} |  16 +--
 .../JobConfigurationTriggeredImpl.java             |  47 +++++++++
 ...a => JobConfigurationTriggeredImplBuilder.java} |  16 +--
 ...java => ScraperConfigurationTriggeredImpl.java} |  99 +++++++-----------
 ... ScraperConfigurationTriggeredImplBuilder.java} |  18 ++--
 .../TriggeredJobConfiguration.java                 |  74 --------------
 .../triggeredscraper/TriggeredScrapeJobImpl.java   |   3 +-
 .../triggeredscraper/TriggeredScraperImpl.java     |  53 +++++++---
 .../triggeredscraper/TriggeredScraperTask.java     |  39 +++----
 .../triggerhandler/TriggerConfiguration.java       |  99 +++++++++---------
 .../triggerhandler/collector/TriggerCollector.java |   3 +-
 .../collector/TriggerCollectorImpl.java            | 112 +++++++++------------
 .../java/scraper/ScraperConfigurationTest.java     |  21 ++--
 .../apache/plc4x/java/scraper/ScraperRunner.java   |   3 +-
 .../plc4x/java/scraper/TriggeredScraperRunner.java |   5 +-
 .../java/scraper/TriggeredScraperRunnerModbus.java |   5 +-
 .../config/ScraperConfigurationBuilderTest.java    |  12 +--
 .../triggeredscraper/TriggeredScraperImplTest.java |   7 +-
 .../triggerhandler/TriggerConfigurationTest.java   |   5 +-
 29 files changed, 527 insertions(+), 469 deletions(-)

diff --git a/plc4j/integrations/apache-calcite/src/test/java/org/apache/plc4x/Plc4XBaseTableTest.java b/plc4j/integrations/apache-calcite/src/test/java/org/apache/plc4x/Plc4XBaseTableTest.java
index 3bc3007..d80cc6b 100644
--- a/plc4j/integrations/apache-calcite/src/test/java/org/apache/plc4x/Plc4XBaseTableTest.java
+++ b/plc4j/integrations/apache-calcite/src/test/java/org/apache/plc4x/Plc4XBaseTableTest.java
@@ -19,7 +19,7 @@ under the License.
 package org.apache.plc4x;
 
 import org.apache.calcite.linq4j.Enumerator;
-import org.apache.plc4x.java.scraper.config.JobConfigurationImpl;
+import org.apache.plc4x.java.scraper.config.JobConfigurationClassicImpl;
 import org.assertj.core.api.WithAssertions;
 import org.junit.jupiter.api.Test;
 
@@ -33,7 +33,7 @@ class Plc4XBaseTableTest implements WithAssertions {
     @Test
     void testOnBlockingQueue() {
         ArrayBlockingQueue<Plc4xSchema.Record> queue = new ArrayBlockingQueue<>(100);
-        Plc4xStreamTable table = new Plc4xStreamTable(queue, new JobConfigurationImpl("job1", 100,
+        Plc4xStreamTable table = new Plc4xStreamTable(queue, new JobConfigurationClassicImpl("job1", null, 100,
             Collections.emptyList(),
             Collections.singletonMap("key", "address")));
 
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScrapeJob.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScrapeJob.java
index dd46f5f..6cde8d6 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScrapeJob.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScrapeJob.java
@@ -19,12 +19,15 @@
 
 package org.apache.plc4x.java.scraper;
 
+import org.apache.plc4x.java.scraper.config.JobConfigurationClassicImpl;
+import org.apache.plc4x.java.scraper.config.ScraperConfigurationClassicImpl;
+
 import java.util.Map;
 
 /**
  * POJO Object to transport all Job information.
- * Is generated from {@link org.apache.plc4x.java.scraper.config.ScraperConfiguration} by
- * merging the sources and the {@link org.apache.plc4x.java.scraper.config.JobConfigurationImpl}.
+ * Is generated from {@link ScraperConfigurationClassicImpl} by
+ * merging the sources and the {@link JobConfigurationClassicImpl}.
  */
 public interface ScrapeJob {
     String getJobName();
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScrapeJobImpl.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScrapeJobImpl.java
index b541e47..9b0c181 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScrapeJobImpl.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScrapeJobImpl.java
@@ -19,14 +19,15 @@
 
 package org.apache.plc4x.java.scraper;
 
-import org.apache.plc4x.java.scraper.config.JobConfigurationImpl;
+import org.apache.plc4x.java.scraper.config.JobConfigurationClassicImpl;
+import org.apache.plc4x.java.scraper.config.ScraperConfigurationClassicImpl;
 
 import java.util.Map;
 
 /**
  * POJO Object to transport all Job information.
- * Is generated from {@link org.apache.plc4x.java.scraper.config.ScraperConfiguration} by
- * merging the sources and the {@link JobConfigurationImpl}.
+ * Is generated from {@link ScraperConfigurationClassicImpl} by
+ * merging the sources and the {@link JobConfigurationClassicImpl}.
  *
  * @deprecated Scraper is deprecated please use {@link org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScrapeJobImpl} instead all functions are supplied as well see java-doc of {@link org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScraperImpl}
  */
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScraperTaskImpl.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScraperTaskImpl.java
index f15ee84..4c5a270 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScraperTaskImpl.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/ScraperTaskImpl.java
@@ -29,7 +29,7 @@ import org.apache.plc4x.java.api.exceptions.PlcRuntimeException;
 import org.apache.plc4x.java.api.messages.PlcReadRequest;
 import org.apache.plc4x.java.api.messages.PlcReadResponse;
 import org.apache.plc4x.java.api.types.PlcResponseCode;
-import org.apache.plc4x.java.scraper.config.JobConfigurationImpl;
+import org.apache.plc4x.java.scraper.config.JobConfigurationClassicImpl;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -45,7 +45,7 @@ import java.util.stream.Collectors;
 /**
  * Plc Scraper Task that scrapes one source.
  * One {@link ScrapeJobImpl} gets split into multiple tasks.
- * One task for each source that is defined in the {@link JobConfigurationImpl}.
+ * One task for each source that is defined in the {@link JobConfigurationClassicImpl}.
  *
  * @deprecated Scraper is deprecated please use {@link org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScrapeJobImpl} instead all functions are supplied as well see java-doc of {@link org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScraperImpl}
  */
@@ -93,10 +93,13 @@ public class ScraperTaskImpl implements ScraperTask {
         this.resultHandler = resultHandler;
     }
 
+
     @Override
     public void run() {
         // Does a single fetch
-        LOGGER.trace("Start new scrape of task of job {} for connection {}", jobName, connectionAlias);
+        if(LOGGER.isDebugEnabled()) {
+            LOGGER.debug("Start new scrape of task of job {} for connection {}", jobName, connectionAlias);
+        }
         requestCounter.incrementAndGet();
         StopWatch stopWatch = new StopWatch();
         stopWatch.start();
@@ -111,15 +114,17 @@ public class ScraperTaskImpl implements ScraperTask {
                 }
             }, handlerService);
             connection = future.get(10*requestTimeoutMs, TimeUnit.MILLISECONDS);
-            LOGGER.trace("Connection to {} established: {}", connectionString, connection);
-            PlcReadResponse response;
+            LOGGER.debug("Connection to {} established: {}", connectionString, connection);
+            PlcReadResponse plcReadResponse;
             try {
-                PlcReadRequest.Builder builder = connection.readRequestBuilder();
-                fields.forEach((alias,qry) -> {
+                //build read request
+                PlcReadRequest.Builder readRequestBuilder = connection.readRequestBuilder();
+                //add fields to be acquired to builder
+                fields.forEach((alias, qry) -> {
                     LOGGER.trace("Requesting: {} -> {}", alias, qry);
-                    builder.addItem(alias,qry);
+                    readRequestBuilder.addItem(alias, qry);
                 });
-                response = builder
+                plcReadResponse = readRequestBuilder
                     .build()
                     .execute()
                     .get(requestTimeoutMs, TimeUnit.MILLISECONDS);
@@ -128,17 +133,20 @@ public class ScraperTaskImpl implements ScraperTask {
                 handleException(e);
                 return;
             }
-            // Add statistics
+
+            LOGGER.debug("Performing statistics");
+            // Add some statistics
             stopWatch.stop();
             latencyStatistics.addValue(stopWatch.getNanoTime());
             failedStatistics.addValue(0.0);
             successCounter.incrementAndGet();
             // Validate response
-            validateResponse(response);
+            validateResponse(plcReadResponse);
+
             // Handle response (Async)
-            CompletableFuture.runAsync(() -> resultHandler.handle(jobName, connectionAlias, transformResponseToMap(response)), handlerService);
+            CompletableFuture.runAsync(() -> resultHandler.handle(jobName, connectionAlias, transformResponseToMap(plcReadResponse)), handlerService);
         } catch (Exception e) {
-            LOGGER.debug("Exception during scrape", e);
+            LOGGER.warn("Exception during scrape of job {}, connection alias {}: error message: {} - change logging to DEBUG for the stack trace", jobName, connectionAlias, e.getMessage());
             handleException(e);
         } finally {
             if (connection != null) {
@@ -151,6 +159,10 @@ public class ScraperTaskImpl implements ScraperTask {
         }
     }
 
+    /**
+     * validates the read response and checks it for failed fields
+     * @param response acquired response
+     */
     private void validateResponse(PlcReadResponse response) {
         Map<String, PlcResponseCode> failedFields = response.getFieldNames().stream()
             .filter(name -> !PlcResponseCode.OK.equals(response.getResponseCode(name)))
@@ -163,6 +175,11 @@ public class ScraperTaskImpl implements ScraperTask {
         }
     }
 
+    /**
+     * transforms the read response into a Map of String (key) to Object (value)
+     * @param response response from PLC
+     * @return transformed Map
+     */
     private Map<String, Object> transformResponseToMap(PlcReadResponse response) {
         return response.getFieldNames().stream()
             .collect(Collectors.toMap(
@@ -203,7 +220,9 @@ public class ScraperTaskImpl implements ScraperTask {
 
     @Override
     public void handleException(Exception e) {
-        LOGGER.debug("Exception: ", e);
+        if(LOGGER.isDebugEnabled()) {
+            LOGGER.debug("Detailed exception occurred at scraping", e);
+        }
         failedStatistics.addValue(1.0);
     }
 
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfiguration.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfiguration.java
index 98b5b79..9c84b0b 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfiguration.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfiguration.java
@@ -23,11 +23,16 @@ import java.util.List;
 import java.util.Map;
 
 /**
- * Created by timbo on 2019-03-05
+ * basic interface for the configuration of a scrape job
  */
 public interface JobConfiguration {
     String getName();
 
+    String getTriggerConfig();
+
+    @Deprecated
+    Integer getScrapeRate();
+
     List<String> getSources();
 
     Map<String, String> getFields();
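
The JobConfiguration interface now exposes both the trigger configuration and the (deprecated, now nullable) scrape rate, so one configuration type can describe triggered as well as classic jobs. A minimal sketch of how a consumer might tell the two apart, mirroring the check done later in ScraperConfigurationTriggeredImpl.getJobs(); the helper class and method names are illustrative only:

    import org.apache.plc4x.java.scraper.config.JobConfiguration;

    public class JobKindSketch {
        /** Rough classification helper; mirrors ScraperConfigurationTriggeredImpl.getJobs(). */
        static String kindOf(JobConfiguration jobConfiguration) {
            if (jobConfiguration.getTriggerConfig() != null) {
                return "triggered";                               // scheduling driven by the triggerConfig string
            }
            if (jobConfiguration.getScrapeRate() != null) {
                return "classic (deprecated fixed scrape rate)";  // polled every scrapeRate ms
            }
            return "invalid: neither triggerConfig nor scrapeRate set";
        }
    }
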
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfigurationClassicImpl.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfigurationClassicImpl.java
new file mode 100644
index 0000000..6688363
--- /dev/null
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfigurationClassicImpl.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.plc4x.java.scraper.config;
+
+import org.apache.plc4x.java.scraper.ScrapeJobImpl;
+import org.apache.plc4x.java.scraper.config.triggeredscraper.JobConfigurationTriggeredImplBuilder;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Configuration for one {@link ScrapeJobImpl} in the @{@link ScraperConfigurationClassicImpl}.
+ *
+ * @deprecated Scraper is deprecated please use {@link JobConfigurationTriggeredImplBuilder} instead all functions are supplied as well see java-doc of {@link org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScraperImpl}
+ */
+@Deprecated
+public class JobConfigurationClassicImpl extends JobConfigurationImpl {
+
+
+    /**
+     * Default constructor
+     *
+     * @param name          Job Name / identifier
+     * @param triggerConfig configuration string for triggered jobs
+     * @param scrapeRate    scrape rate in ms
+     * @param sources       source alias (<b>not</b> connection string but the alias (from @{@link ScraperConfigurationClassicImpl}).
+     * @param fields        Map from field alias (how it is named in the result map) to plc4x field query
+     */
+    public JobConfigurationClassicImpl(String name, String triggerConfig, Integer scrapeRate, List<String> sources, Map<String, String> fields) {
+        super(name, triggerConfig, scrapeRate, sources, fields);
+    }
+}
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfigurationImplBuilder.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfigurationClassicImplBuilder.java
similarity index 62%
rename from plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfigurationImplBuilder.java
rename to plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfigurationClassicImplBuilder.java
index 418d2fb..bbbcb2a 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfigurationImplBuilder.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfigurationClassicImplBuilder.java
@@ -19,45 +19,47 @@
 
 package org.apache.plc4x.java.scraper.config;
 
+import org.apache.plc4x.java.scraper.config.triggeredscraper.JobConfigurationTriggeredImpl;
+
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
 /**
- * @deprecated Scraper is deprecated please use {@link org.apache.plc4x.java.scraper.config.triggeredscraper.TriggeredJobConfiguration} instead all functions are supplied as well see java-doc of {@link org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScraperImpl}
+ * @deprecated Scraper is deprecated please use {@link JobConfigurationTriggeredImpl} instead all functions are supplied as well see java-doc of {@link org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScraperImpl}
  */
 @Deprecated
-public class JobConfigurationImplBuilder {
+public class JobConfigurationClassicImplBuilder {
 
-    private final ScraperConfigurationBuilder parent;
+    private final ScraperConfigurationClassicImplBuilder parent;
     private final String name;
     private final int scrapeRateMs;
 
     private final List<String> sources = new ArrayList<>();
     private final Map<String, String> fields = new HashMap<>();
 
-    public JobConfigurationImplBuilder(ScraperConfigurationBuilder parent, String name, int scrapeRateMs) {
+    public JobConfigurationClassicImplBuilder(ScraperConfigurationClassicImplBuilder parent, String name, int scrapeRateMs) {
         this.parent = parent;
         this.name = name;
         this.scrapeRateMs = scrapeRateMs;
     }
 
-    public JobConfigurationImplBuilder source(String alias) {
+    public JobConfigurationClassicImplBuilder source(String alias) {
         this.sources.add(alias);
         return this;
     }
 
-    public JobConfigurationImplBuilder field(String alias, String fieldQuery) {
+    public JobConfigurationClassicImplBuilder field(String alias, String fieldQuery) {
         this.fields.put(alias, fieldQuery);
         return this;
     }
 
-    private JobConfigurationImpl buildInternal() {
-        return new JobConfigurationImpl(name, scrapeRateMs, sources, fields);
+    private JobConfigurationClassicImpl buildInternal() {
+        return new JobConfigurationClassicImpl(name,null, scrapeRateMs, sources, fields);
     }
 
-    public ScraperConfigurationBuilder build() {
+    public ScraperConfigurationClassicImplBuilder build() {
         parent.addJobConfiguration(this.buildInternal());
         return this.parent;
     }
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfigurationImpl.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfigurationImpl.java
index 044a93b..81438b9 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfigurationImpl.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/JobConfigurationImpl.java
@@ -21,37 +21,36 @@ package org.apache.plc4x.java.scraper.config;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import org.apache.plc4x.java.scraper.ScrapeJobImpl;
+import org.apache.plc4x.java.api.exceptions.PlcNotImplementedException;
 
 import java.util.List;
 import java.util.Map;
 
 /**
- * Configuration for one {@link ScrapeJobImpl} in the @{@link ScraperConfiguration}.
- *
- * @deprecated Scraper is deprecated please use {@link org.apache.plc4x.java.scraper.config.triggeredscraper.TriggeredJobConfigurationBuilder} instead all functions are supplied as well see java-doc of {@link org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScraperImpl}
+ * abstract configuration for scrape-job configuration
  */
-@Deprecated
 public class JobConfigurationImpl implements JobConfiguration {
-
-    private final String name;
-    private final int scrapeRate;
-    private final List<String> sources;
-    private final Map<String, String> fields;
+    protected final String name;
+    protected final String triggerConfig;
+    protected final Integer scrapeRate;
+    protected final List<String> sources;
+    protected final Map<String, String> fields;
 
     /**
      * Default constructor
      * @param name Job Name / identifier
-     * @param scrapeRate Scrape rate in ms
-     * @param sources source alias (<b>not</b> connection string but the alias (from @{@link ScraperConfiguration}).
+     * @param triggerConfig configuration string for triggered jobs
+     * @param sources source alias (<b>not</b> connection string but the alias (from @{@link ScraperConfigurationClassicImpl}).
      * @param fields Map from field alias (how it is named in the result map) to plc4x field query
      */
     @JsonCreator
     public JobConfigurationImpl(@JsonProperty(value = "name", required = true) String name,
-                                @JsonProperty(value = "scrapeRate", required = true) int scrapeRate,
+                                @JsonProperty(value = "triggerConfig") String triggerConfig,
+                                @JsonProperty(value = "scrapeRate") Integer scrapeRate,
                                 @JsonProperty(value = "sources", required = true) List<String> sources,
                                 @JsonProperty(value = "fields", required = true) Map<String, String> fields) {
         this.name = name;
+        this.triggerConfig = triggerConfig;
         this.scrapeRate = scrapeRate;
         this.sources = sources;
         this.fields = fields;
@@ -62,8 +61,9 @@ public class JobConfigurationImpl implements JobConfiguration {
         return name;
     }
 
-    public int getScrapeRate() {
-        return scrapeRate;
+    @Override
+    public String getTriggerConfig() {
+        return triggerConfig;
     }
 
     @Override
@@ -75,4 +75,9 @@ public class JobConfigurationImpl implements JobConfiguration {
     public Map<String, String> getFields() {
         return fields;
     }
+
+    @Override
+    public Integer getScrapeRate() {
+        return scrapeRate;
+    }
 }
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/ScraperConfiguration.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/ScraperConfiguration.java
index 9b0804b..c3cb2da 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/ScraperConfiguration.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/ScraperConfiguration.java
@@ -19,85 +19,43 @@
 
 package org.apache.plc4x.java.scraper.config;
 
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.exc.MismatchedInputException;
 import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
 import org.apache.plc4x.java.scraper.ScrapeJob;
-import org.apache.plc4x.java.scraper.ScrapeJobImpl;
 import org.apache.plc4x.java.scraper.exception.ScraperConfigurationException;
 import org.apache.plc4x.java.scraper.exception.ScraperException;
-import org.apache.plc4x.java.scraper.ScraperImpl;
-import org.apache.plc4x.java.scraper.config.triggeredscraper.TriggeredJobConfiguration;
-import org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScrapeJobImpl;
 
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
-import java.util.function.Function;
-import java.util.stream.Collectors;
 
 /**
- * Configuration class for {@link ScraperImpl}.
- *
- * @deprecated Scraper is deprecated please use {@link org.apache.plc4x.java.scraper.config.triggeredscraper.TriggeredScraperConfiguration} instead all functions are supplied as well see java-doc of {@link org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScraperImpl}
+ * interface for basic configuration of scraper
  */
-@Deprecated
-public class ScraperConfiguration {
-
-    private final Map<String, String> sources;
-    private final List<JobConfigurationImpl> jobConfigurations;
-
-    /**
-     * Default constructor.
-     *
-     * @param sources           Map from connection alias to connection string
-     * @param jobConfigurations List of configurations one for each Job
-     */
-    @JsonCreator
-    public ScraperConfiguration(@JsonProperty(value = "sources", required = true) Map<String, String> sources,
-                                @JsonProperty(value = "jobs", required = true) List<JobConfigurationImpl> jobConfigurations) {
-        checkNoUnreferencedSources(sources, jobConfigurations);
-        // TODO Warning on too many sources?!
-        this.sources = sources;
-        this.jobConfigurations = jobConfigurations;
-    }
-
-    private void checkNoUnreferencedSources(Map<String, String> sources, List<JobConfigurationImpl> jobConfigurations) {
-        Set<String> unreferencedSources = jobConfigurations.stream()
-            .flatMap(job -> job.getSources().stream())
-            .filter(source -> !sources.containsKey(source))
-            .collect(Collectors.toSet());
-        if (!unreferencedSources.isEmpty()) {
-            throw new ScraperConfigurationException("There are the following unreferenced sources: " + unreferencedSources);
-        }
-    }
-
-    public static ScraperConfiguration fromYaml(String yaml) {
+public interface ScraperConfiguration {
+    static <T>T fromYaml(String yaml, Class<T> clazz) {
         ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
         try {
-            return mapper.readValue(yaml, ScraperConfiguration.class);
+            return mapper.readValue(yaml, clazz);
         } catch (IOException e) {
             throw new ScraperConfigurationException("Unable to parse given yaml configuration!", e);
         }
     }
 
-    public static ScraperConfiguration fromJson(String json) {
+    static <T>T fromJson(String json, Class<T> clazz) {
         ObjectMapper mapper = new ObjectMapper(new JsonFactory());
         try {
-            return mapper.readValue(json, ScraperConfiguration.class);
+            return mapper.readValue(json, clazz);
         } catch (IOException e) {
             throw new ScraperConfigurationException("Unable to parse given json configuration!", e);
         }
     }
 
-    public static ScraperConfiguration fromFile(String path) throws IOException {
+    static <T>T fromFile(String path, Class<T> clazz) throws IOException {
         ObjectMapper mapper;
         if (path.endsWith("json")) {
             mapper = new ObjectMapper(new JsonFactory());
@@ -107,7 +65,7 @@ public class ScraperConfiguration {
             throw new ScraperConfigurationException("Only files with extensions json, yml or yaml can be read");
         }
         try {
-            return mapper.readValue(new File(path), ScraperConfiguration.class);
+            return mapper.readValue(new File(path), clazz);
         } catch (FileNotFoundException e) {
             throw new ScraperConfigurationException("Unable to find configuration given configuration file at '" + path + "'", e);
         } catch (MismatchedInputException e) {
@@ -115,42 +73,9 @@ public class ScraperConfiguration {
         }
     }
 
-    public Map<String, String> getSources() {
-        return sources;
-    }
+    Map<String, String> getSources();
 
-    public List<JobConfigurationImpl> getJobConfigurations() {
-        return jobConfigurations;
-    }
+    List<JobConfigurationImpl> getJobConfigurations();
 
-    public List<ScrapeJob> getJobs() throws ScraperException {
-        List<ScrapeJob> scrapeJobs = new ArrayList<>();
-        for(JobConfiguration jobConfiguration:jobConfigurations){
-            if(jobConfiguration instanceof JobConfigurationImpl){
-                JobConfigurationImpl jobConfigurationImpl = (JobConfigurationImpl)jobConfiguration;
-                scrapeJobs.add(new ScrapeJobImpl(jobConfiguration.getName(),
-                    jobConfigurationImpl.getScrapeRate(),
-                    getSourcesForAliases(jobConfiguration.getSources()),
-                    jobConfiguration.getFields()));
-            }
-            else{
-                if(jobConfiguration instanceof TriggeredJobConfiguration){
-                    TriggeredJobConfiguration triggeredJobConfiguration = (TriggeredJobConfiguration) jobConfiguration;
-                    scrapeJobs.add(new TriggeredScrapeJobImpl(jobConfiguration.getName(),
-                        triggeredJobConfiguration.getTriggerConfig(),
-                        getSourcesForAliases(jobConfiguration.getSources()),
-                        jobConfiguration.getFields()));
-                }
-            }
-        }
-        return scrapeJobs;
-    }
-
-    private Map<String, String> getSourcesForAliases(List<String> aliases) {
-        return aliases.stream()
-            .collect(Collectors.toMap(
-                Function.identity(),
-                sources::get
-            ));
-    }
+    List<ScrapeJob> getJobs() throws ScraperException;
 }
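
ScraperConfiguration is reduced to an interface: the fromYaml/fromJson/fromFile factories are now generic statics and the caller picks the concrete configuration class. A minimal usage sketch under that assumption; the configuration file name is an illustrative placeholder, not something shipped with this commit:

    import org.apache.plc4x.java.scraper.ScrapeJob;
    import org.apache.plc4x.java.scraper.config.ScraperConfiguration;
    import org.apache.plc4x.java.scraper.config.triggeredscraper.ScraperConfigurationTriggeredImpl;
    import org.apache.plc4x.java.scraper.exception.ScraperException;

    import java.io.IOException;
    import java.util.List;

    public class ScraperConfigurationLoadingSketch {
        public static void main(String[] args) throws IOException, ScraperException {
            // The concrete configuration flavour is now chosen via the class parameter.
            ScraperConfigurationTriggeredImpl configuration =
                ScraperConfiguration.fromFile("scraper-config.yml", ScraperConfigurationTriggeredImpl.class);

            // Scrape jobs are derived from the parsed job configurations and source aliases.
            List<ScrapeJob> jobs = configuration.getJobs();
            jobs.forEach(job -> System.out.println(job.getJobName()));
        }
    }
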
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/ScraperConfigurationClassicImpl.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/ScraperConfigurationClassicImpl.java
new file mode 100644
index 0000000..aafa373
--- /dev/null
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/ScraperConfigurationClassicImpl.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.plc4x.java.scraper.config;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.plc4x.java.scraper.ScrapeJob;
+import org.apache.plc4x.java.scraper.config.triggeredscraper.ScraperConfigurationTriggeredImpl;
+import org.apache.plc4x.java.scraper.exception.ScraperConfigurationException;
+import org.apache.plc4x.java.scraper.exception.ScraperException;
+import org.apache.plc4x.java.scraper.ScraperImpl;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * Configuration class for {@link ScraperImpl}.
+ *
+ * @deprecated Scraper is deprecated please use {@link ScraperConfigurationTriggeredImpl} instead all functions are supplied as well see java-doc of {@link org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScraperImpl}
+ */
+@Deprecated
+public class ScraperConfigurationClassicImpl implements ScraperConfiguration {
+
+    private final Map<String, String> sources;
+    private final List<JobConfigurationImpl> jobConfigurations;
+
+    /**
+     * Default constructor.
+     *
+     * @param sources           Map from connection alias to connection string
+     * @param jobConfigurations List of configurations one for each Job
+     */
+    @JsonCreator
+    public ScraperConfigurationClassicImpl(@JsonProperty(value = "sources", required = true) Map<String, String> sources,
+                                           @JsonProperty(value = "jobs", required = true) List<JobConfigurationImpl> jobConfigurations) {
+        checkNoUnreferencedSources(sources, jobConfigurations);
+        this.sources = sources;
+        this.jobConfigurations = jobConfigurations;
+    }
+
+    private void checkNoUnreferencedSources(Map<String, String> sources, List<JobConfigurationImpl> jobConfigurations) {
+        Set<String> unreferencedSources = jobConfigurations.stream()
+            .flatMap(job -> job.getSources().stream())
+            .filter(source -> !sources.containsKey(source))
+            .collect(Collectors.toSet());
+        if (!unreferencedSources.isEmpty()) {
+            throw new ScraperConfigurationException("There are the following unreferenced sources: " + unreferencedSources);
+        }
+    }
+
+    @Override
+    public Map<String, String> getSources() {
+        return sources;
+    }
+
+    @Override
+    public List<JobConfigurationImpl> getJobConfigurations() {
+        return jobConfigurations;
+    }
+
+    @Override
+    public List<ScrapeJob> getJobs() throws ScraperException {
+        return ScraperConfigurationTriggeredImpl.getJobs(jobConfigurations,sources);
+    }
+
+
+}
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/ScraperConfigurationBuilder.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/ScraperConfigurationClassicImplBuilder.java
similarity index 61%
rename from plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/ScraperConfigurationBuilder.java
rename to plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/ScraperConfigurationClassicImplBuilder.java
index f0cb3b9..e64a009 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/ScraperConfigurationBuilder.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/ScraperConfigurationClassicImplBuilder.java
@@ -19,34 +19,36 @@
 
 package org.apache.plc4x.java.scraper.config;
 
+import org.apache.plc4x.java.scraper.config.triggeredscraper.ScraperConfigurationTriggeredImplBuilder;
+
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
 /**
- * @deprecated Scraper is deprecated please use {@link org.apache.plc4x.java.scraper.config.triggeredscraper.TriggeredScraperConfigurationBuilder} instead all functions are supplied as well see java-doc of {@link org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScraperImpl}
+ * @deprecated Scraper is deprecated please use {@link ScraperConfigurationTriggeredImplBuilder} instead all functions are supplied as well see java-doc of {@link org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScraperImpl}
  */
 @Deprecated
-public class ScraperConfigurationBuilder {
+public class ScraperConfigurationClassicImplBuilder {
 
     private final Map<String, String> sources = new HashMap<>();
     private final List<JobConfigurationImpl> jobConfigurations = new ArrayList<>();
 
-    public ScraperConfigurationBuilder addSource(String alias, String connectionString) {
+    public ScraperConfigurationClassicImplBuilder addSource(String alias, String connectionString) {
         sources.put(alias, connectionString);
         return this;
     }
 
-    public JobConfigurationImplBuilder job(String name, int scrapeRateMs) {
-        return new JobConfigurationImplBuilder(this, name, scrapeRateMs);
+    public JobConfigurationClassicImplBuilder job(String name, int scrapeRateMs) {
+        return new JobConfigurationClassicImplBuilder(this, name, scrapeRateMs);
     }
 
     public ScraperConfiguration build() {
-        return new ScraperConfiguration(sources, jobConfigurations);
+        return new ScraperConfigurationClassicImpl(sources, jobConfigurations);
     }
 
-    public void addJobConfiguration(JobConfigurationImpl configuration) {
+    public void addJobConfiguration(JobConfigurationClassicImpl configuration) {
         this.jobConfigurations.add(configuration);
     }
 }
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/JobConfigurationTriggeredImpl.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/JobConfigurationTriggeredImpl.java
new file mode 100644
index 0000000..48d850c
--- /dev/null
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/JobConfigurationTriggeredImpl.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.plc4x.java.scraper.config.triggeredscraper;
+
+
+import org.apache.plc4x.java.scraper.config.JobConfigurationImpl;
+import org.apache.plc4x.java.scraper.config.ScraperConfigurationClassicImpl;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Configuration for one {@link org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScrapeJobImpl} in the {@link ScraperConfigurationTriggeredImpl}.
+ */
+public class JobConfigurationTriggeredImpl extends JobConfigurationImpl {
+
+
+    /**
+     * Default constructor
+     *
+     * @param name          Job Name / identifier
+     * @param triggerConfig configuration string for triggered jobs
+     * @param scrapeRate    scrape rate in ms (may be null for triggered jobs)
+     * @param sources       source alias (<b>not</b> connection string but the alias (from @{@link ScraperConfigurationClassicImpl}).
+     * @param fields        Map from field alias (how it is named in the result map) to plc4x field query
+     */
+    public JobConfigurationTriggeredImpl(String name, String triggerConfig, Integer scrapeRate, List<String> sources, Map<String, String> fields) {
+        super(name, triggerConfig, scrapeRate, sources, fields);
+    }
+}
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/TriggeredJobConfigurationBuilder.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/JobConfigurationTriggeredImplBuilder.java
similarity index 76%
rename from plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/TriggeredJobConfigurationBuilder.java
rename to plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/JobConfigurationTriggeredImplBuilder.java
index 817c3c9..48b7bd6 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/TriggeredJobConfigurationBuilder.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/JobConfigurationTriggeredImplBuilder.java
@@ -26,16 +26,16 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-public class TriggeredJobConfigurationBuilder {
+public class JobConfigurationTriggeredImplBuilder {
 
-    private final TriggeredScraperConfigurationBuilder parent;
+    private final ScraperConfigurationTriggeredImplBuilder parent;
     private final String name;
     private final String triggerConfig;
 
     private final List<String> sources = new ArrayList<>();
     private final Map<String, String> fields = new HashMap<>();
 
-    public TriggeredJobConfigurationBuilder(TriggeredScraperConfigurationBuilder parent, String name, String triggerConfig) {
+    public JobConfigurationTriggeredImplBuilder(ScraperConfigurationTriggeredImplBuilder parent, String name, String triggerConfig) {
         if(parent==null){
             throw new ScraperConfigurationException("parent builder cannot be null");
         }
@@ -47,7 +47,7 @@ public class TriggeredJobConfigurationBuilder {
         this.triggerConfig = triggerConfig;
     }
 
-    public TriggeredJobConfigurationBuilder source(String alias) {
+    public JobConfigurationTriggeredImplBuilder source(String alias) {
         if(alias==null || alias.isEmpty()){
             throw new ScraperConfigurationException("source alias cannot be null or empty");
         }
@@ -55,16 +55,16 @@ public class TriggeredJobConfigurationBuilder {
         return this;
     }
 
-    public TriggeredJobConfigurationBuilder field(String alias, String fieldQuery) {
+    public JobConfigurationTriggeredImplBuilder field(String alias, String fieldQuery) {
         this.fields.put(alias, fieldQuery);
         return this;
     }
 
-    private TriggeredJobConfiguration buildInternal() {
-        return new TriggeredJobConfiguration(name, triggerConfig, sources, fields);
+    private JobConfigurationTriggeredImpl buildInternal() {
+        return new JobConfigurationTriggeredImpl(name, triggerConfig,null, sources, fields);
     }
 
-    public TriggeredScraperConfigurationBuilder build() {
+    public ScraperConfigurationTriggeredImplBuilder build() {
         parent.addJobConfiguration(this.buildInternal());
         return this.parent;
     }
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/TriggeredScraperConfiguration.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/ScraperConfigurationTriggeredImpl.java
similarity index 50%
rename from plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/TriggeredScraperConfiguration.java
rename to plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/ScraperConfigurationTriggeredImpl.java
index 40e66ee..af552a3 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/TriggeredScraperConfiguration.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/ScraperConfigurationTriggeredImpl.java
@@ -21,21 +21,18 @@ package org.apache.plc4x.java.scraper.config.triggeredscraper;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.core.JsonFactory;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.exc.MismatchedInputException;
-import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
 import org.apache.plc4x.java.scraper.ScrapeJob;
 import org.apache.plc4x.java.scraper.ScrapeJobImpl;
+import org.apache.plc4x.java.scraper.config.JobConfigurationImpl;
+import org.apache.plc4x.java.scraper.config.ScraperConfiguration;
 import org.apache.plc4x.java.scraper.exception.ScraperException;
 import org.apache.plc4x.java.scraper.config.JobConfiguration;
-import org.apache.plc4x.java.scraper.config.JobConfigurationImpl;
+import org.apache.plc4x.java.scraper.config.JobConfigurationClassicImpl;
 import org.apache.plc4x.java.scraper.exception.ScraperConfigurationException;
 import org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScrapeJobImpl;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -46,10 +43,11 @@ import java.util.stream.Collectors;
 /**
  * Configuration class for {@link org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScraperImpl}.
  */
-public class TriggeredScraperConfiguration {
+public class ScraperConfigurationTriggeredImpl implements ScraperConfiguration {
+    private static final Logger logger = LoggerFactory.getLogger( ScraperConfigurationTriggeredImpl.class );
 
     private final Map<String, String> sources;
-    private final List<TriggeredJobConfiguration> jobConfigurations;
+    private final List<JobConfigurationImpl> jobConfigurations;
 
     /**
      * Default constructor.
@@ -58,14 +56,14 @@ public class TriggeredScraperConfiguration {
      * @param jobConfigurations List of configurations one for each Job
      */
     @JsonCreator
-    public TriggeredScraperConfiguration(@JsonProperty(value = "sources", required = true) Map<String, String> sources,
-                                         @JsonProperty(value = "jobs", required = true) List<TriggeredJobConfiguration> jobConfigurations) {
+    public ScraperConfigurationTriggeredImpl(@JsonProperty(value = "sources", required = true) Map<String, String> sources,
+                                             @JsonProperty(value = "jobs", required = true) List<JobConfigurationImpl> jobConfigurations) {
         checkNoUnreferencedSources(sources, jobConfigurations);
         this.sources = sources;
         this.jobConfigurations = jobConfigurations;
     }
 
-    private void checkNoUnreferencedSources(Map<String, String> sources, List<TriggeredJobConfiguration> jobConfigurations) {
+    private void checkNoUnreferencedSources(Map<String, String> sources, List<JobConfigurationImpl> jobConfigurations) {
         Set<String> unreferencedSources = jobConfigurations.stream()
             .flatMap(job -> job.getSources().stream())
             .filter(source -> !sources.containsKey(source))
@@ -75,74 +73,51 @@ public class TriggeredScraperConfiguration {
         }
     }
 
-    public static TriggeredScraperConfiguration fromYaml(String yaml) {
-        ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
-        try {
-            return mapper.readValue(yaml, TriggeredScraperConfiguration.class);
-        } catch (IOException e) {
-            throw new ScraperConfigurationException("Unable to parse given yaml configuration!", e);
-        }
-    }
-
-    public static TriggeredScraperConfiguration fromJson(String json) {
-        ObjectMapper mapper = new ObjectMapper(new JsonFactory());
-        try {
-            return mapper.readValue(json, TriggeredScraperConfiguration.class);
-        } catch (IOException e) {
-            throw new ScraperConfigurationException("Unable to parse given json configuration!", e);
-        }
-    }
-
-    public static TriggeredScraperConfiguration fromFile(String path) throws IOException {
-        ObjectMapper mapper;
-        if (path.endsWith("json")) {
-            mapper = new ObjectMapper(new JsonFactory());
-        } else if (path.endsWith("yml") || path.endsWith("yaml")) {
-            mapper = new ObjectMapper(new YAMLFactory());
-        } else {
-            throw new ScraperConfigurationException("Only files with extensions json, yml or yaml can be read");
-        }
-        try {
-            return mapper.readValue(new File(path), TriggeredScraperConfiguration.class);
-        } catch (FileNotFoundException e) {
-            throw new ScraperConfigurationException("Unable to find configuration given configuration file at '" + path + "'", e);
-        } catch (MismatchedInputException e) {
-            throw new ScraperConfigurationException("Given configuration is in wrong format!", e);
-        }
-    }
-
+    @Override
     public Map<String, String> getSources() {
         return sources;
     }
 
-    public List<TriggeredJobConfiguration> getJobConfigurations() {
+    @Override
+    public List<JobConfigurationImpl> getJobConfigurations() {
         return jobConfigurations;
     }
 
+    @Override
     public List<ScrapeJob> getJobs() throws ScraperException {
+        return getJobs(jobConfigurations,sources);
+    }
+
+    public static List<ScrapeJob> getJobs(List<JobConfigurationImpl> jobConfigurations, Map<String, String> sources) throws ScraperConfigurationException {
         List<ScrapeJob> scrapeJobs = new ArrayList<>();
         for(JobConfiguration jobConfiguration:jobConfigurations){
-            if(jobConfiguration instanceof TriggeredJobConfiguration){
-                JobConfigurationImpl jobConfigurationImpl = (JobConfigurationImpl)jobConfiguration;
-                scrapeJobs.add(new ScrapeJobImpl(jobConfiguration.getName(),
-                    jobConfigurationImpl.getScrapeRate(),
-                    getSourcesForAliases(jobConfiguration.getSources()),
+            if(jobConfiguration.getTriggerConfig()!=null){
+                logger.info("Assuming job as triggered job because triggerConfig has been set");
+                scrapeJobs.add(new TriggeredScrapeJobImpl(jobConfiguration.getName(),
+                    jobConfiguration.getTriggerConfig(),
+                    getSourcesForAliases(jobConfiguration.getSources(),sources),
                     jobConfiguration.getFields()));
             }
-            else{
-                if(jobConfiguration instanceof TriggeredJobConfiguration){
-                    TriggeredJobConfiguration triggeredJobConfiguration = (TriggeredJobConfiguration) jobConfiguration;
-                    scrapeJobs.add(new TriggeredScrapeJobImpl(jobConfiguration.getName(),
-                        triggeredJobConfiguration.getTriggerConfig(),
-                        getSourcesForAliases(jobConfiguration.getSources()),
+            else {
+                if(jobConfiguration.getScrapeRate()!=null){
+                    logger.info("Assuming job as classic job because triggerConfig has NOT been set but scrapeRate has.");
+                    scrapeJobs.add(new ScrapeJobImpl(
+                        jobConfiguration.getName(),
+                        jobConfiguration.getScrapeRate(),
+                        getSourcesForAliases(jobConfiguration.getSources(),sources),
                         jobConfiguration.getFields()));
                 }
+                else {
+                    logger.info("Job has lack of trigger/scheduled config");
+                    throw new ScraperConfigurationException(
+                        String.format("Job %s was intended to be o triggered annotation, but no triggerConfig-Field could be found. Canceling!",jobConfiguration.getName()));
+                }
             }
         }
         return scrapeJobs;
     }
 
-    private Map<String, String> getSourcesForAliases(List<String> aliases) {
+    private static Map<String, String> getSourcesForAliases(List<String> aliases, Map<String, String> sources) {
         return aliases.stream()
             .collect(Collectors.toMap(
                 Function.identity(),
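
With TriggeredJobConfiguration gone, the static getJobs(...) helper decides per job configuration: a non-null triggerConfig yields a TriggeredScrapeJobImpl, a non-null scrapeRate yields a classic ScrapeJobImpl, and neither raises a ScraperConfigurationException. A hedged sketch of that dispatch; the connection string, field addresses and trigger string are illustrative placeholders (the trigger string must follow whatever syntax TriggerConfiguration expects), not values taken from this commit:

    import org.apache.plc4x.java.scraper.ScrapeJob;
    import org.apache.plc4x.java.scraper.config.JobConfigurationClassicImpl;
    import org.apache.plc4x.java.scraper.config.JobConfigurationImpl;
    import org.apache.plc4x.java.scraper.config.triggeredscraper.JobConfigurationTriggeredImpl;
    import org.apache.plc4x.java.scraper.config.triggeredscraper.ScraperConfigurationTriggeredImpl;
    import org.apache.plc4x.java.scraper.exception.ScraperConfigurationException;

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;
    import java.util.Map;

    public class GetJobsDispatchSketch {
        public static void main(String[] args) throws ScraperConfigurationException {
            Map<String, String> sources = Collections.singletonMap("plc1", "s7://192.168.0.1/0/1");

            // Triggered job: triggerConfig set, scrapeRate left null.
            JobConfigurationImpl triggered = new JobConfigurationTriggeredImpl(
                "triggered-job", "(SCHEDULED,1000)", null,
                Collections.singletonList("plc1"),
                Collections.singletonMap("value1", "%DB1.DBD0:REAL"));

            // Classic job: no triggerConfig, only a fixed scrape rate in ms.
            JobConfigurationImpl classic = new JobConfigurationClassicImpl(
                "classic-job", null, 100,
                Collections.singletonList("plc1"),
                Collections.singletonMap("value2", "%DB1.DBD4:REAL"));

            List<ScrapeJob> jobs =
                ScraperConfigurationTriggeredImpl.getJobs(Arrays.asList(triggered, classic), sources);
            jobs.forEach(job -> System.out.println(job.getJobName()));
        }
    }
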
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/TriggeredScraperConfigurationBuilder.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/ScraperConfigurationTriggeredImplBuilder.java
similarity index 62%
rename from plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/TriggeredScraperConfigurationBuilder.java
rename to plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/ScraperConfigurationTriggeredImplBuilder.java
index da02027..6c1dc64 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/TriggeredScraperConfigurationBuilder.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/ScraperConfigurationTriggeredImplBuilder.java
@@ -19,30 +19,32 @@
 
 package org.apache.plc4x.java.scraper.config.triggeredscraper;
 
+import org.apache.plc4x.java.scraper.config.JobConfigurationImpl;
+
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-public class TriggeredScraperConfigurationBuilder {
+public class ScraperConfigurationTriggeredImplBuilder {
 
     private final Map<String, String> sources = new HashMap<>();
-    private final List<TriggeredJobConfiguration> jobConfigurations = new ArrayList<>();
+    private final List<JobConfigurationImpl> jobConfigurations = new ArrayList<>();
 
-    public TriggeredScraperConfigurationBuilder addSource(String alias, String connectionString) {
+    public ScraperConfigurationTriggeredImplBuilder addSource(String alias, String connectionString) {
         sources.put(alias, connectionString);
         return this;
     }
 
-    public TriggeredJobConfigurationBuilder job(String name, String triggerConfig) {
-        return new TriggeredJobConfigurationBuilder(this, name, triggerConfig);
+    public JobConfigurationTriggeredImplBuilder job(String name, String triggerConfig) {
+        return new JobConfigurationTriggeredImplBuilder(this, name, triggerConfig);
     }
 
-    public TriggeredScraperConfiguration build() {
-        return new TriggeredScraperConfiguration(sources, jobConfigurations);
+    public ScraperConfigurationTriggeredImpl build() {
+        return new ScraperConfigurationTriggeredImpl(sources, jobConfigurations);
     }
 
-    public void addJobConfiguration(TriggeredJobConfiguration configuration) {
+    public void addJobConfiguration(JobConfigurationTriggeredImpl configuration) {
         this.jobConfigurations.add(configuration);
     }
 }
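
The renamed builders keep the existing fluent flow, only the class names change. A minimal sketch of the chain; connection string, trigger string and field address are placeholders, not values from this commit:

    import org.apache.plc4x.java.scraper.config.triggeredscraper.ScraperConfigurationTriggeredImpl;
    import org.apache.plc4x.java.scraper.config.triggeredscraper.ScraperConfigurationTriggeredImplBuilder;
    import org.apache.plc4x.java.scraper.exception.ScraperConfigurationException;

    public class TriggeredBuilderSketch {
        public static void main(String[] args) throws ScraperConfigurationException {
            ScraperConfigurationTriggeredImpl configuration = new ScraperConfigurationTriggeredImplBuilder()
                .addSource("plc1", "s7://192.168.0.1/0/1")   // alias -> connection string
                .job("triggered-job", "(SCHEDULED,1000)")    // returns a JobConfigurationTriggeredImplBuilder
                    .source("plc1")
                    .field("value1", "%DB1.DBD0:REAL")
                    .build()                                 // back to the parent configuration builder
                .build();                                    // assembles the ScraperConfigurationTriggeredImpl
            System.out.println(configuration.getJobConfigurations().size());
        }
    }
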
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/TriggeredJobConfiguration.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/TriggeredJobConfiguration.java
deleted file mode 100644
index 73bb20e..0000000
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/config/triggeredscraper/TriggeredJobConfiguration.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.plc4x.java.scraper.config.triggeredscraper;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-import org.apache.plc4x.java.scraper.config.JobConfiguration;
-import org.apache.plc4x.java.scraper.config.ScraperConfiguration;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * Configuration for one {@link org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScrapeJobImpl} in the {@link org.apache.plc4x.java.scraper.config.triggeredscraper.TriggeredScraperConfiguration}.
- */
-public class TriggeredJobConfiguration implements JobConfiguration {
-
-    private final String name;
-    private final String triggerConfig;
-    private final List<String> sources;
-    private final Map<String, String> fields;
-
-    /**
-     * Default constructor
-     * @param name Job Name / identifier
-     * @param triggerConfig configuration string for triggered jobs
-     * @param sources source alias (<b>not</b> connection string but the alias (from @{@link ScraperConfiguration}).
-     * @param fields Map from field alias (how it is named in the result map) to plc4x field query
-     */
-    @JsonCreator
-    public TriggeredJobConfiguration(@JsonProperty(value = "name", required = true) String name,
-                                     @JsonProperty(value = "triggerConfig", required = true) String triggerConfig,
-                                     @JsonProperty(value = "sources", required = true) List<String> sources,
-                                     @JsonProperty(value = "fields", required = true) Map<String, String> fields) {
-        this.name = name;
-        this.triggerConfig = triggerConfig;
-        this.sources = sources;
-        this.fields = fields;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public String getTriggerConfig() {
-        return triggerConfig;
-    }
-
-    public List<String> getSources() {
-        return sources;
-    }
-
-    public Map<String, String> getFields() {
-        return fields;
-    }
-}
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/TriggeredScrapeJobImpl.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/TriggeredScrapeJobImpl.java
index 1a73144..b9f50cc 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/TriggeredScrapeJobImpl.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/TriggeredScrapeJobImpl.java
@@ -20,6 +20,7 @@
 package org.apache.plc4x.java.scraper.triggeredscraper;
 
 import org.apache.plc4x.java.scraper.ScrapeJob;
+import org.apache.plc4x.java.scraper.exception.ScraperConfigurationException;
 import org.apache.plc4x.java.scraper.exception.ScraperException;
 import org.apache.plc4x.java.scraper.triggeredscraper.triggerhandler.TriggerConfiguration;
 
@@ -33,7 +34,7 @@ public class TriggeredScrapeJobImpl implements ScrapeJob {
     private final TriggerConfiguration triggerConfiguration;
 
 
-    public TriggeredScrapeJobImpl(String jobName, String triggerConfig, Map<String, String> connections, Map<String, String> fields) throws ScraperException {
+    public TriggeredScrapeJobImpl(String jobName, String triggerConfig, Map<String, String> connections, Map<String, String> fields) throws ScraperConfigurationException {
         this.jobName = jobName;
         this.triggerConfig = triggerConfig;
         this.sourceConnections = connections;
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/TriggeredScraperImpl.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/TriggeredScraperImpl.java
index d3e8a19..386a412 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/TriggeredScraperImpl.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/TriggeredScraperImpl.java
@@ -32,8 +32,9 @@ import org.apache.plc4x.java.api.exceptions.PlcConnectionException;
 import org.apache.plc4x.java.api.exceptions.PlcRuntimeException;
 import org.apache.plc4x.java.api.messages.PlcReadResponse;
 import org.apache.plc4x.java.scraper.*;
+import org.apache.plc4x.java.scraper.config.ScraperConfiguration;
 import org.apache.plc4x.java.scraper.exception.ScraperException;
-import org.apache.plc4x.java.scraper.config.triggeredscraper.TriggeredScraperConfiguration;
+import org.apache.plc4x.java.scraper.config.triggeredscraper.ScraperConfigurationTriggeredImpl;
 import org.apache.plc4x.java.scraper.triggeredscraper.triggerhandler.collector.TriggerCollector;
 import org.apache.plc4x.java.scraper.util.PercentageAboveThreshold;
 import org.apache.plc4x.java.spi.PlcDriver;
@@ -64,22 +65,12 @@ import java.util.stream.Collectors;
 public class TriggeredScraperImpl implements Scraper, TriggeredScraperMBean {
 
     private static final Logger LOGGER = LoggerFactory.getLogger(TriggeredScraperImpl.class);
-    public static final String MX_DOMAIN = "org.apache.plc4x.java";
+    private static final String MX_DOMAIN = "org.apache.plc4x.java";
 
     private static final int DEFAULT_FUTURE_TIME_OUT = 2000;
 
-    private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(10,
-        new BasicThreadFactory.Builder()
-            .namingPattern("triggeredscraper-scheduling-thread-%d")
-            .daemon(false)
-            .build()
-    );
-    private final ExecutorService executorService = Executors.newFixedThreadPool(4,
-        new BasicThreadFactory.Builder()
-            .namingPattern("triggeredscraper-executer-thread-%d")
-            .daemon(true)
-            .build()
-    );
+    private final ScheduledExecutorService scheduler;
+    private final ExecutorService executorService;
 
     private final ResultHandler resultHandler;
 
@@ -99,7 +90,7 @@ public class TriggeredScraperImpl implements Scraper, TriggeredScraperMBean {
      * @param config Configuration to use.
      * @param resultHandler handler the defines the processing of acquired data
      */
-    public TriggeredScraperImpl(TriggeredScraperConfiguration config, ResultHandler resultHandler,TriggerCollector triggerCollector) throws ScraperException {
+    public TriggeredScraperImpl(ScraperConfiguration config, ResultHandler resultHandler, TriggerCollector triggerCollector) throws ScraperException {
         this(resultHandler, createPooledDriverManager(), config.getJobs(),triggerCollector,DEFAULT_FUTURE_TIME_OUT);
     }
 
@@ -109,12 +100,22 @@ public class TriggeredScraperImpl implements Scraper, TriggeredScraperMBean {
      * @param plcDriverManager external DriverManager
      * @param resultHandler handler the defines the processing of acquired data
      */
-    public TriggeredScraperImpl(TriggeredScraperConfiguration config, PlcDriverManager plcDriverManager, ResultHandler resultHandler,TriggerCollector triggerCollector) throws ScraperException {
+    public TriggeredScraperImpl(ScraperConfiguration config, PlcDriverManager plcDriverManager, ResultHandler resultHandler,TriggerCollector triggerCollector) throws ScraperException {
         this(resultHandler, plcDriverManager, config.getJobs(),triggerCollector,DEFAULT_FUTURE_TIME_OUT);
     }
 
     /**
      * Creates a Scraper instance from a configuration.
+     * @param config Configuration to use.
+     * @param plcDriverManager external DriverManager
+     * @param resultHandler handler that defines the processing of acquired data
+     */
+    public TriggeredScraperImpl(ScraperConfigurationTriggeredImpl config, PlcDriverManager plcDriverManager, ResultHandler resultHandler, TriggerCollector triggerCollector, int poolSizeScheduler, int poolSizeExecutor) throws ScraperException {
+        this(resultHandler, plcDriverManager, config.getJobs(),triggerCollector,DEFAULT_FUTURE_TIME_OUT,poolSizeScheduler,poolSizeExecutor);
+    }
+
+    /**
+     * Creates a Scraper instance from a configuration.
      * @param plcDriverManager external DriverManager
      * @param resultHandler handler the defines the processing of acquired data
      * @param jobs list of jobs that scraper shall handle
@@ -122,12 +123,32 @@ public class TriggeredScraperImpl implements Scraper, TriggeredScraperMBean {
      * @param futureTimeOut max duration of future to return a result
      */
     public TriggeredScraperImpl(ResultHandler resultHandler, PlcDriverManager plcDriverManager, List<ScrapeJob> jobs,TriggerCollector triggerCollector, long futureTimeOut) {
+        this(resultHandler,plcDriverManager,jobs,triggerCollector,futureTimeOut,20,5);
+    }
+
+    public TriggeredScraperImpl(ResultHandler resultHandler, PlcDriverManager plcDriverManager, List<ScrapeJob> jobs,TriggerCollector triggerCollector, long futureTimeOut, int poolSizeScheduler, int poolSizeExecutor) {
         this.resultHandler = resultHandler;
         Validate.notEmpty(jobs);
         this.driverManager = plcDriverManager;
         this.jobs = jobs;
         this.triggerCollector = triggerCollector;
         this.futureTimeOut = futureTimeOut;
+
+        this.scheduler = Executors.newScheduledThreadPool(poolSizeScheduler,
+            new BasicThreadFactory.Builder()
+                .namingPattern("triggeredscraper-scheduling-thread-%d")
+                .daemon(false)
+                .build()
+        );
+
+        this.executorService = Executors.newFixedThreadPool(poolSizeExecutor,
+            new BasicThreadFactory.Builder()
+                .namingPattern("triggeredscraper-executor-thread-%d")
+                .daemon(true)
+                .build()
+        );
+
+
         // Register MBean
         mBeanServer = ManagementFactory.getPlatformMBeanServer();
         try {
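
As a rough usage sketch (not part of this commit): the jobs-list constructor with explicit pool sizes introduced above could be wired up as follows. The YAML file name, the plain (non-pooled) PlcDriverManager and the chosen pool sizes are illustrative assumptions; the runners in this repository use a PooledPlcDriverManager instead.

    import org.apache.plc4x.java.PlcDriverManager;
    import org.apache.plc4x.java.scraper.config.ScraperConfiguration;
    import org.apache.plc4x.java.scraper.config.triggeredscraper.ScraperConfigurationTriggeredImpl;
    import org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScraperImpl;
    import org.apache.plc4x.java.scraper.triggeredscraper.triggerhandler.collector.TriggerCollector;
    import org.apache.plc4x.java.scraper.triggeredscraper.triggerhandler.collector.TriggerCollectorImpl;

    public class TriggeredScraperPoolSizeSketch {
        public static void main(String[] args) throws Exception {
            // load a triggered configuration from YAML (file name assumed)
            ScraperConfiguration configuration = ScraperConfiguration.fromFile(
                "example_triggered_scraper.yml", ScraperConfigurationTriggeredImpl.class);
            PlcDriverManager driverManager = new PlcDriverManager();
            TriggerCollector triggerCollector = new TriggerCollectorImpl(driverManager);
            // futureTimeOut of 2000 ms and the 20/5 pool sizes mirror the defaults introduced in this hunk
            TriggeredScraperImpl scraper = new TriggeredScraperImpl(
                (job, alias, results) -> System.out.println(job + "/" + alias + ": " + results),
                driverManager,
                configuration.getJobs(),
                triggerCollector,
                2000,
                20,  // poolSizeScheduler
                5);  // poolSizeExecutor
            triggerCollector.start();
            scraper.start();
        }
    }
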
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/TriggeredScraperTask.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/TriggeredScraperTask.java
index 92cfcd9..ca18021 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/TriggeredScraperTask.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/TriggeredScraperTask.java
@@ -46,7 +46,6 @@ import java.util.stream.Collectors;
 
 /**
  * performs the triggered task from a job for one device based on the TriggerHandler as defined in Configuration
- * ToDo Implement the monitoring as well: PLC4X-90
  */
 public class TriggeredScraperTask implements ScraperTask, TriggeredScraperTaskMBean {
     private static final Logger LOGGER = LoggerFactory.getLogger(TriggeredScraperTask.class);
@@ -89,7 +88,6 @@ public class TriggeredScraperTask implements ScraperTask, TriggeredScraperTaskMB
     }
 
     @Override
-    //ToDo code-refactoring and improved testing --> PLC4X-90
     public void run() {
         if(LOGGER.isTraceEnabled()) {
             LOGGER.trace("Check condition for task of job {} for connection {}", jobName, connectionAlias);
@@ -116,14 +114,18 @@ public class TriggeredScraperTask implements ScraperTask, TriggeredScraperTaskMB
                 if(LOGGER.isTraceEnabled()) {
                     LOGGER.trace("Connection to {} established: {}", connectionString, connection);
                 }
-                PlcReadResponse response;
+
+                PlcReadResponse plcReadResponse;
                 try {
-                    PlcReadRequest.Builder builder = connection.readRequestBuilder();
-                    fields.forEach((alias, qry) -> {
-                        LOGGER.trace("Requesting: {} -> {}", alias, qry);
-                        builder.addItem(alias, qry);
-                    });
-                    response = builder
+                    PlcReadRequest.Builder readRequestBuilder = connection.readRequestBuilder();
+                    for(Map.Entry<String,String> entry:fields.entrySet()){
+                        if(LOGGER.isTraceEnabled()) {
+                            LOGGER.trace("Requesting: {} -> {}", entry.getKey(), entry.getValue());
+                        }
+                        readRequestBuilder.addItem(entry.getKey(),entry.getValue());
+                    }
+                    //build and send request and store result in read response
+                    plcReadResponse = readRequestBuilder
                         .build()
                         .execute()
                         .get(requestTimeoutMs, TimeUnit.MILLISECONDS);
@@ -132,19 +134,19 @@ public class TriggeredScraperTask implements ScraperTask, TriggeredScraperTaskMB
                     handleException(e);
                     return;
                 }
+
                 // Add statistics
+                LOGGER.debug("Performing statistics");
                 stopWatch.stop();
                 latencyStatistics.addValue(stopWatch.getNanoTime());
                 failedStatistics.addValue(0.0);
                 successCounter.incrementAndGet();
                 // Validate response
-                validateResponse(response);
+                validateResponse(plcReadResponse);
                 // Handle response (Async)
-                CompletableFuture.runAsync(() -> resultHandler.handle(jobName, connectionAlias, TriggeredScraperImpl.convertPlcResponseToMap(response)), executorService);
+                CompletableFuture.runAsync(() -> resultHandler.handle(jobName, connectionAlias, TriggeredScraperImpl.convertPlcResponseToMap(plcReadResponse)), executorService);
             } catch (Exception e) {
-                if(LOGGER.isDebugEnabled()) {
-                    LOGGER.debug("Exception during scrape", e);
-                }
+                LOGGER.warn("Exception during scraping of Job {}, Connection-Alias {}: Error-message: {} - for stack-trace change logging to DEBUG", jobName,connectionAlias,e.getMessage());
                 handleException(e);
             } finally {
                 if (connection != null) {
@@ -175,8 +177,6 @@ public class TriggeredScraperTask implements ScraperTask, TriggeredScraperTaskMB
         }
     }
 
-
-
     @Override
     public String getJobName() {
         return this.jobName;
@@ -204,12 +204,15 @@ public class TriggeredScraperTask implements ScraperTask, TriggeredScraperTaskMB
 
     @Override
     public void handleException(Exception e) {
-        // TODO Implement this
+        if(LOGGER.isDebugEnabled()) {
+            LOGGER.debug("Detailed exception occurred at scraping", e);
+        }
+        failedStatistics.addValue(1.0);
     }
 
     @Override
     public void handleErrorResponse(Map<String, PlcResponseCode> failed) {
-        // TODO Implement this
+        LOGGER.warn("Handling error responses: {}", failed);
     }
 
     public PlcDriverManager getDriverManager() {
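
The refactored read loop above follows the standard PLC4X read pattern; as a hedged, self-contained sketch (the connection string and field query are illustrative assumptions, not taken from this commit):

    import java.util.concurrent.TimeUnit;

    import org.apache.plc4x.java.PlcDriverManager;
    import org.apache.plc4x.java.api.PlcConnection;
    import org.apache.plc4x.java.api.messages.PlcReadRequest;
    import org.apache.plc4x.java.api.messages.PlcReadResponse;
    import org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScraperImpl;

    public class SingleFieldReadSketch {
        public static void main(String[] args) throws Exception {
            // getConnection() hands back a ready-to-use PlcConnection (connection string assumed)
            try (PlcConnection connection = new PlcDriverManager().getConnection("s7://192.168.0.1/0/0")) {
                PlcReadRequest.Builder readRequestBuilder = connection.readRequestBuilder();
                // alias -> plc4x field query, just like the loop over the fields map above
                readRequestBuilder.addItem("value1", "%DB810:DBW0:INT");
                PlcReadResponse plcReadResponse = readRequestBuilder
                    .build()
                    .execute()
                    .get(2000, TimeUnit.MILLISECONDS); // bounded wait, analogous to requestTimeoutMs
                System.out.println(TriggeredScraperImpl.convertPlcResponseToMap(plcReadResponse));
            }
        }
    }
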
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/triggerhandler/TriggerConfiguration.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/triggerhandler/TriggerConfiguration.java
index 6fbd234..0049e55 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/triggerhandler/TriggerConfiguration.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/triggerhandler/TriggerConfiguration.java
@@ -19,9 +19,9 @@
 
 package org.apache.plc4x.java.scraper.triggeredscraper.triggerhandler;
 
-import org.apache.plc4x.java.api.exceptions.PlcInvalidFieldException;
 import org.apache.plc4x.java.api.model.PlcField;
 import org.apache.plc4x.java.s7.model.S7Field;
+import org.apache.plc4x.java.scraper.exception.ScraperConfigurationException;
 import org.apache.plc4x.java.scraper.exception.ScraperException;
 import org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScrapeJobImpl;
 import org.slf4j.Logger;
@@ -35,28 +35,23 @@ import java.util.regex.Pattern;
 /**
  * basic configuration for all available triggers and handling of regarding condition
  */
-//ToDo: Improve structure to make it more generic --> PLC4X-89
 public class TriggerConfiguration{
     private static final Logger logger = LoggerFactory.getLogger(TriggerConfiguration.class);
 
     private static final String S_7_TRIGGER_VAR = "S7_TRIGGER_VAR";
-    private static final String SCHEDULED = "SCHEDULED";
+    private static final String SCHEDULED       = "SCHEDULED";
+    private static final String PREVIOUS_DEF    = "PREV";
 
     private static final double TOLERANCE_FLOATING_EQUALITY = 1e-6;
 
     private static final Pattern TRIGGER_STRATEGY_PATTERN =
         Pattern.compile("\\((?<strategy>[A-Z_0-9]+),(?<scheduledInterval>\\d+)(,(\\((?<triggerVar>[^!=<>()]+)\\))((?<comp>[!=<>]{1,2}))(\\((?<compVar>[PREVa-z0-9.\\-]+)\\))((?<concatConn>[ANDOR]{2,3})(\\((?<triggerVar2>[^!=<>()]+)\\))((?<comp2>[!=<>]{1,2}))(\\((?<compVar2>[PREVa-z0-9.\\-]+)\\)))?)?\\)");
-    private static final String PREVIOUS_DEF = "PREV";
 
     private final TriggerType triggerType;
     private final Long scrapeInterval;
     private TriggeredScrapeJobImpl triggeredScrapeJobImpl;
-
-
     private List<TriggerElement> triggerElementList;
 
-
-
     /**
      * default constructor when an S7Field should be used for triggering
      * @param triggerType type of trigger from enum
@@ -69,7 +64,7 @@ public class TriggerConfiguration{
                                 String scrapeInterval,
                                 List<TriggerElement> triggerElementList,
                                 TriggeredScrapeJobImpl triggeredScrapeJobImpl)
-                                throws ScraperException {
+                                throws ScraperConfigurationException {
         this.triggerElementList = triggerElementList;
         this.triggerType = triggerType;
         this.triggeredScrapeJobImpl = triggeredScrapeJobImpl;
@@ -79,18 +74,17 @@ public class TriggerConfiguration{
 
         if(this.triggerType.equals(TriggerType.S7_TRIGGER_VAR)) {
             //test for valid field-connection string, on exception quit job and return message to user
-
-                if(this.triggerElementList.isEmpty()){
-                    exceptionMessage = String.format("No items in trigger List for trigger-type S7_TRIGGER_VAR for Job !", triggeredScrapeJobImpl.getJobName());
-                    throw new ScraperException(exceptionMessage);
-                }
-                checkTriggerVarList();
+            if(this.triggerElementList.isEmpty()){
+                exceptionMessage = String.format("No items in trigger List for trigger-type S7_TRIGGER_VAR for Job %s!", triggeredScrapeJobImpl.getJobName());
+                throw new ScraperConfigurationException(exceptionMessage);
+            }
+            checkTriggerVarList();
 
             //ToDo add more and other trigger
         }
         else{
             exceptionMessage = String.format("TriggerType %s is not yet implemented", this.triggerType);
-            throw new ScraperException(exceptionMessage);
+            throw new ScraperConfigurationException(exceptionMessage);
         }
 
     }
@@ -99,9 +93,9 @@ public class TriggerConfiguration{
      * default constructor when scheduled trigger shall be performed
      * @param triggerType type of trigger from enum
      * @param scrapeInterval scrape interval of data from block
-     * @throws ScraperException when something goes wrong with configuration
+     * @throws ScraperConfigurationException when something goes wrong with configuration
      */
-    public TriggerConfiguration(TriggerType triggerType, String scrapeInterval) throws ScraperException {
+    public TriggerConfiguration(TriggerType triggerType, String scrapeInterval) throws ScraperConfigurationException {
         this.triggerType = triggerType;
         this.scrapeInterval = parseScrapeInterval(scrapeInterval);
         this.triggerElementList = new ArrayList<>();
@@ -109,13 +103,13 @@ public class TriggerConfiguration{
 
     /**
      * checks the trigger list for correct syntax
-     * @throws ScraperException if syntax isn't correct an exception is thrown
+     * @throws ScraperConfigurationException if syntax isn't correct an exception is thrown
      */
-    private void checkTriggerVarList() throws ScraperException {
+    private void checkTriggerVarList() throws ScraperConfigurationException {
         boolean first = true;
         for(TriggerElement triggerElement:this.triggerElementList){
             if(!first && triggerElement.getConcatType()==null){
-                throw new ScraperException("A concat for the second and following trigger must be given!");
+                throw new ScraperConfigurationException("A concat for the second and following trigger must be given!");
             }
             first = false;
         }
@@ -125,16 +119,16 @@ public class TriggerConfiguration{
      * parses String of scrape interval
      * @param scrapeInterval string extracted from RegEx
      * @return converted value
-     * @throws ScraperException if parsing could not be performed
+     * @throws ScraperConfigurationException if parsing could not be performed
      */
-    private long parseScrapeInterval(String scrapeInterval) throws ScraperException {
+    private long parseScrapeInterval(String scrapeInterval) throws ScraperConfigurationException {
         try {
             return Long.parseLong(scrapeInterval);
         }
         catch (Exception e){
             handleException(e);
             String exceptionMessage = String.format("No valid numeric for scrapeInterval for Job %s: %s",triggeredScrapeJobImpl.getJobName(),scrapeInterval);
-            throw new ScraperException(exceptionMessage);
+            throw new ScraperConfigurationException(exceptionMessage);
         }
     }
 
@@ -156,7 +150,7 @@ public class TriggerConfiguration{
      * @throws ScraperException when an unsupported S7-Type is chosen,which is not (yet) implemented for comparison
      * ToDo check how to handle time-variables if needed
      */
-    private static Class<?> validateDataType(PlcField plcField) throws ScraperException {
+    private static Class<?> validateDataType(PlcField plcField) throws ScraperConfigurationException {
         if(plcField!=null){
             Class<?> javaDataType = plcField.getDefaultJavaType();
             if(!javaDataType.equals(Boolean.class)
@@ -165,18 +159,17 @@ public class TriggerConfiguration{
                 && !javaDataType.equals(Double.class)
             ){
                 String exceptionMessage = String.format("Unsupported plc-trigger variable %s with converted data-type %s used",plcField,plcField.getDefaultJavaType());
-                throw new ScraperException(exceptionMessage);
+                throw new ScraperConfigurationException(exceptionMessage);
             }
             return javaDataType;
         }
         else{
             String exceptionMessage = String.format("Unsupported plc-trigger variable %s with converted data-type %s used",plcField,plcField.getDefaultJavaType());
-            throw new ScraperException(exceptionMessage);
+            throw new ScraperConfigurationException(exceptionMessage);
         }
 
     }
 
-
     /**
      * nested class performing the trigger evaluation
      */
@@ -191,9 +184,9 @@ public class TriggerConfiguration{
 
         /**
          * does the evaluation of the trigger conditions are met
-         * //ToDo refactor this
+         * //ToDo refactor this to improve readability
          * @return true if trigger conditions are met, false otherwise
-         * @throws ScraperException
+         * @throws ScraperException if something went wrong
          */
         boolean evaluateTrigger() throws ScraperException {
             List<Boolean> triggerResultList = new ArrayList<>();
@@ -204,10 +197,12 @@ public class TriggerConfiguration{
                 }
                 logger.trace("eval values for job {} and {}: {}",triggeredScrapeJobImpl.getJobName(),connString,acquiredValuesList);
             }
+            //iterate through all items of the acquired-values list
             for(int countElements=0; countElements<acquiredValuesList.size();countElements++){
                 TriggerElement triggerElement = triggerElementList.get(countElements);
                 Object acquiredObject = acquiredValuesList.get(countElements);
                 if(validateDataType(triggerElement.getPlcField()).equals(Boolean.class)){
+                    //if given type is Boolean
                     boolean currentValue;
                     boolean refValue;
                     try{
@@ -228,7 +223,8 @@ public class TriggerConfiguration{
                 if(validateDataType(triggerElement.getPlcField()).equals(Double.class)
                     || validateDataType(triggerElement.getPlcField()).equals(Integer.class)
                     || validateDataType(triggerElement.getPlcField()).equals(Long.class)) {
-                    boolean skipComparison = false;
+                    //if given type is numerical
+                    boolean skipComparison = false; //comparison shall be skipped if the previous value was null
                     double currentValue;
                     double refValue = 0;
                     try{
@@ -324,6 +320,7 @@ public class TriggerConfiguration{
                 }
                 return false;
             }
+            //check if there is more than one condition for the trigger
             if(triggerResultList.size()>1) {
                 if(logger.isTraceEnabled()) {
                     logger.trace("{}", triggerResultList);
@@ -343,13 +340,13 @@ public class TriggerConfiguration{
                     }
                 }
                 if(combinedResult) {
-                    triggerElementList.forEach(triggerElement -> triggerElement.overrideCompareValue());
+                    triggerElementList.forEach(TriggerElement::overrideCompareValue);
                 }
                 return combinedResult;
             }
             else{
                 if(triggerResultList.get(0)) {
-                    triggerElementList.forEach(triggerElement -> triggerElement.overrideCompareValue());
+                    triggerElementList.forEach(TriggerElement::overrideCompareValue);
                 }
                 //return first result because its the only one
                 return triggerResultList.get(0);
@@ -364,7 +361,7 @@ public class TriggerConfiguration{
      * @return created TriggerConfiguration
      * @throws ScraperException when something goes wrong
      */
-    public static TriggerConfiguration createConfiguration(String jobTriggerStrategy,TriggeredScrapeJobImpl triggeredScrapeJob) throws ScraperException {
+    public static TriggerConfiguration createConfiguration(String jobTriggerStrategy,TriggeredScrapeJobImpl triggeredScrapeJob) throws ScraperConfigurationException {
         Matcher matcher = TRIGGER_STRATEGY_PATTERN.matcher(jobTriggerStrategy);
 
         if(matcher.matches()){
@@ -382,7 +379,7 @@ public class TriggerConfiguration{
                 case S_7_TRIGGER_VAR:
 
                     if(triggerVar ==null || comparatorString==null || comparatorVariable==null){
-                        throw new ScraperException("S7_TRIGGER_VAR trigger strategy needs the trigger-condition - information missing! given configString: "+jobTriggerStrategy);
+                        throw new ScraperConfigurationException("S7_TRIGGER_VAR trigger strategy needs the trigger-condition - information missing! given configString: "+jobTriggerStrategy);
                     }
 
                     List<TriggerElement> triggerElements = new ArrayList<>();
@@ -418,21 +415,19 @@ public class TriggerConfiguration{
                     }
 
                     //ToDo add clever Strategy to concat more than two conditions if needed
-
-
                     return new TriggerConfiguration(TriggerType.S7_TRIGGER_VAR,scheduledMs,triggerElements,triggeredScrapeJob);
                 case SCHEDULED:
                     if(triggerVar !=null || comparatorString!=null || comparatorVariable!=null){
-                        throw new ScraperException("SCHEDULED trigger strategy must only be used with scheduled interval - nothing more!  given configString: "+jobTriggerStrategy);
+                        throw new ScraperConfigurationException("SCHEDULED trigger strategy must only be used with scheduled interval - nothing more!  given configString: "+jobTriggerStrategy);
                     }
                     return new TriggerConfiguration(TriggerType.SCHEDULED,scheduledMs);
                 default:
-                    throw new ScraperException("Unknown Trigger Strategy "+triggerStrategy);
+                    throw new ScraperConfigurationException("Unknown Trigger Strategy "+triggerStrategy);
             }
 
 
         }
-        throw new ScraperException("Invalid trigger strategy string description: "+jobTriggerStrategy);
+        throw new ScraperConfigurationException("Invalid trigger strategy string description: "+jobTriggerStrategy);
     }
 
     private void handleException(Exception e){
@@ -533,7 +528,7 @@ public class TriggerConfiguration{
             this.plcField = plcField;
         }
 
-        TriggerElement(String comparator, String concatType, String compareValue, String plcField, String triggerStrategy) throws ScraperException {
+        TriggerElement(String comparator, String concatType, String compareValue, String plcField, String triggerStrategy) throws ScraperConfigurationException {
             this();
             this.plcFieldString = plcField;
             this.plcConnectionString = plcConnectionString;
@@ -545,7 +540,7 @@ public class TriggerConfiguration{
                     if(logger.isDebugEnabled()) {
                         logger.debug("Exception occurred parsing a S7Field");
                     }
-                    throw new ScraperException("Exception on parsing S7Field (" + plcField + "): " + e.getMessage());
+                    throw new ScraperConfigurationException("Exception on parsing S7Field (" + plcField + "): " + e.getMessage());
                 }
                 this.compareValue = convertCompareValue(compareValue,this.plcField);
                 this.comparatorType = detectComparatorType(comparator);
@@ -563,7 +558,7 @@ public class TriggerConfiguration{
          * @return converted object to needed data-type
          * @throws ScraperException when something does not match or parsing fails
          */
-        private Object convertCompareValue(String compareValue, PlcField plcField) throws ScraperException {
+        private Object convertCompareValue(String compareValue, PlcField plcField) throws ScraperConfigurationException {
             Class<?> javaDataType = validateDataType(plcField);
             if(javaDataType.equals(Boolean.class)){
                 switch (compareValue){
@@ -575,7 +570,7 @@ public class TriggerConfiguration{
                         return false;
                     default:
                         String exceptionMessage = String.format("No valid compare Value at DataType Boolean for trigger: %s",compareValue);
-                        throw new ScraperException(exceptionMessage);
+                        throw new ScraperConfigurationException(exceptionMessage);
                 }
             }
             if(javaDataType.equals(Double.class)
@@ -593,18 +588,18 @@ public class TriggerConfiguration{
                 catch (Exception e){
                     logger.debug(e.getMessage(), e);
                     String exceptionMessage = String.format("No valid compare Value at DataType Numeric for trigger: %s",compareValue);
-                    throw new ScraperException(exceptionMessage);
+                    throw new ScraperConfigurationException(exceptionMessage);
                 }
             }
             String exceptionMessage = "Invalid Datatype detected ... should not happen and be catcht earlier - please report";
-            throw new ScraperException(exceptionMessage);
+            throw new ScraperConfigurationException(exceptionMessage);
         }
 
         /**
          * converts parsed comparator from regex to ComparatorType
          * @throws ScraperException when no valid comparator has been used
          */
-        private Comparator detectComparatorType(String comparator) throws ScraperException {
+        private Comparator detectComparatorType(String comparator) throws ScraperConfigurationException {
             switch (comparator){
                 case "==":
                     return Comparator.EQUAL;
@@ -619,7 +614,7 @@ public class TriggerConfiguration{
                 case ">":
                     return Comparator.GREATER;
                 default:
-                    throw new ScraperException("Invalid comparator detected!");
+                    throw new ScraperConfigurationException("Invalid comparator detected!");
             }
         }
 
@@ -627,7 +622,7 @@ public class TriggerConfiguration{
          * convertes parsed comparator from regex to ComparatorType
          * @throws ScraperException when no valid comparator has been used
          */
-        private ConcatType detectConcatType(String concat) throws ScraperException {
+        private ConcatType detectConcatType(String concat) throws ScraperConfigurationException {
             //concat is not necessary in every case, correct usage is checked later on
             if(concat==null){
                 return null;
@@ -638,7 +633,7 @@ public class TriggerConfiguration{
                 case "OR":
                     return ConcatType.OR;
                 default:
-                    throw new ScraperException("Invalid concat between triggerVars detected: "+concat);
+                    throw new ScraperConfigurationException("Invalid concat between triggerVars detected: "+concat);
             }
         }
 
@@ -646,11 +641,11 @@ public class TriggerConfiguration{
          * matches data-type and comparator for a valid combination
          * @throws ScraperException when invalid combination is detected
          */
-        private void matchTypeAndComparator() throws ScraperException {
+        private void matchTypeAndComparator() throws ScraperConfigurationException {
             if(validateDataType(this.plcField).equals(Boolean.class)
                 && !(this.comparatorType.equals(Comparator.EQUAL) || this.comparatorType.equals(Comparator.UNEQUAL))){
                 String exceptionMessage = String.format("Trigger-Data-Type (%s) and Comparator (%s) do not match",this.plcField.getDefaultJavaType(),this.comparatorType);
-                throw new ScraperException(exceptionMessage);
+                throw new ScraperConfigurationException(exceptionMessage);
             }
             //all other combinations are valid
         }
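
For orientation, a trigger strategy string matched by TRIGGER_STRATEGY_PATTERN looks like "(SCHEDULED,1000)", optionally followed by one or two trigger conditions for the S7_TRIGGER_VAR strategy. A hedged sketch of parsing a purely scheduled trigger (the connection string and field address are illustrative assumptions):

    import java.util.Collections;
    import java.util.Map;

    import org.apache.plc4x.java.scraper.exception.ScraperConfigurationException;
    import org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScrapeJobImpl;
    import org.apache.plc4x.java.scraper.triggeredscraper.triggerhandler.TriggerConfiguration;

    public class ScheduledTriggerSketch {
        public static void main(String[] args) throws ScraperConfigurationException {
            Map<String, String> connections = Collections.singletonMap("plc1", "s7://192.168.0.1/0/0");
            Map<String, String> fields = Collections.singletonMap("value1", "%DB810:DBW0:INT");
            // "(SCHEDULED,1000)": scheduled strategy with a 1000 ms scrape interval, no trigger variable
            TriggeredScrapeJobImpl job =
                new TriggeredScrapeJobImpl("job1", "(SCHEDULED,1000)", connections, fields);
            TriggerConfiguration triggerConfiguration =
                TriggerConfiguration.createConfiguration("(SCHEDULED,1000)", job);
            System.out.println("parsed trigger configuration for " + job.getJobName());
        }
    }
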
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/triggerhandler/collector/TriggerCollector.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/triggerhandler/collector/TriggerCollector.java
index 37d2eed..5e1ea0f 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/triggerhandler/collector/TriggerCollector.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/triggerhandler/collector/TriggerCollector.java
@@ -21,7 +21,8 @@ package org.apache.plc4x.java.scraper.triggeredscraper.triggerhandler.collector;
 import org.apache.plc4x.java.scraper.exception.ScraperException;
 
 /**
- * defines the interface for implementing a TriggerCollector that handles and acquires all triggerRequests that need a PlcConnection
+ * defines the interface for implementing a TriggerCollector
+ * that handles all triggerRequests that need a PlcConnection and acquires them at once
  */
 public interface TriggerCollector {
 
diff --git a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/triggerhandler/collector/TriggerCollectorImpl.java b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/triggerhandler/collector/TriggerCollectorImpl.java
index 02d158b..a87cc73 100644
--- a/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/triggerhandler/collector/TriggerCollectorImpl.java
+++ b/plc4j/utils/scraper/src/main/java/org/apache/plc4x/java/scraper/triggeredscraper/triggerhandler/collector/TriggerCollectorImpl.java
@@ -21,11 +21,8 @@ package org.apache.plc4x.java.scraper.triggeredscraper.triggerhandler.collector;
 import org.apache.commons.lang3.concurrent.BasicThreadFactory;
 import org.apache.plc4x.java.PlcDriverManager;
 import org.apache.plc4x.java.api.PlcConnection;
-import org.apache.plc4x.java.api.exceptions.PlcConnectionException;
-import org.apache.plc4x.java.api.exceptions.PlcRuntimeException;
 import org.apache.plc4x.java.api.messages.PlcReadRequest;
 import org.apache.plc4x.java.api.messages.PlcReadResponse;
-import org.apache.plc4x.java.scraper.Scraper;
 import org.apache.plc4x.java.scraper.exception.ScraperException;
 import org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScraperImpl;
 import org.slf4j.Logger;
@@ -43,32 +40,40 @@ public class TriggerCollectorImpl implements TriggerCollector {
     private static final Logger logger = LoggerFactory.getLogger( TriggerCollectorImpl.class );
 
     private static final int DEFAULT_SCHEDULED_TRIGGER_INTERVAL = 1000;
-    private static final int FUTURE_TIMEOUT = 2000;
-    private static final int READ_REQUEST_TIMEOUT = 2000;
+    private static final int FUTURE_TIMEOUT                     = 2000;
+    private static final int READ_REQUEST_TIMEOUT               = 2000;
+
     private final PlcDriverManager plcDriverManager;
     private final Map<String,RequestElement> currentRequestElements;
     private long schedulerInterval;
     private final long futureTimeout;
 
-    private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(3,
-        new BasicThreadFactory.Builder()
-            .namingPattern("triggercollector-scheduler-thread-%d")
-            .daemon(false)
-            .build()
-    );
-    private final ExecutorService executorService = Executors.newFixedThreadPool(10,
-        new BasicThreadFactory.Builder()
-            .namingPattern("triggercollector-executer-thread-%d")
-            .daemon(true)
-            .build()
-    );
-
+    private final ScheduledExecutorService scheduledExecutorService;
+    private final ExecutorService executorService;
 
-    public TriggerCollectorImpl(PlcDriverManager plcDriverManager, long schedulerInterval, long futureTimeout) {
+    public TriggerCollectorImpl(PlcDriverManager plcDriverManager, long schedulerInterval, long futureTimeout, int poolSizeScheduler, int poolSizeExecutor) {
         this.plcDriverManager = plcDriverManager;
         this.currentRequestElements = new ConcurrentHashMap<>();
         this.schedulerInterval = schedulerInterval;
         this.futureTimeout = futureTimeout;
+
+        this.scheduledExecutorService = Executors.newScheduledThreadPool(poolSizeScheduler,
+            new BasicThreadFactory.Builder()
+                .namingPattern("triggercollector-scheduledExecutorService-thread-%d")
+                .daemon(false)
+                .build()
+        );
+        this.executorService = Executors.newFixedThreadPool(poolSizeExecutor,
+            new BasicThreadFactory.Builder()
+                .namingPattern("triggercollector-executerService-thread-%d")
+                .daemon(true)
+                .build()
+        );
+
+    }
+
+    public TriggerCollectorImpl(PlcDriverManager plcDriverManager, long schedulerInterval, long futureTimeout) {
+        this(plcDriverManager,schedulerInterval,futureTimeout,10,20);
     }
 
     public TriggerCollectorImpl(PlcDriverManager plcDriverManager) {
@@ -212,14 +217,8 @@ public class TriggerCollectorImpl implements TriggerCollector {
      * @return the object acquired by requesting plc instance
      */
     @Override
-    public Object requestResult(String uuid, long timeout) throws ScraperException {
-        Object result = currentRequestElements.get(uuid).getResult();
-        /*
-        if(result==null){
-            throw new ScraperException("No result acquired yet");
-        }
-        */
-        return result;
+    public Object requestResult(String uuid, long timeout){
+        return currentRequestElements.get(uuid).getResult();
     }
 
     /**
@@ -227,7 +226,7 @@ public class TriggerCollectorImpl implements TriggerCollector {
      */
     @Override
     public void start() {
-        this.scheduler.scheduleAtFixedRate(() -> processActiveTrigger(), 1_000, this.schedulerInterval, TimeUnit.MILLISECONDS);
+        this.scheduledExecutorService.scheduleAtFixedRate(this::processActiveTrigger, 1_000, this.schedulerInterval, TimeUnit.MILLISECONDS);
     }
 
     /**
@@ -235,52 +234,37 @@ public class TriggerCollectorImpl implements TriggerCollector {
      */
     @Override
     public void stop() {
-        //ToDo stop everything the right way
+        this.scheduledExecutorService.shutdown();
+        this.executorService.shutdown();
     }
 
+
     class RequestElement{
         private String plcConnectionString;
         private String plcField;
-        private LocalDateTime submitTimeOut;
         private LocalDateTime lastAcquirement;
         private Object result;
         private String uuid;
-        private boolean submitted;
-        private CompletableFuture<PlcReadResponse> plcReadResponse;
         private long scanIntervalMs;
 
 
-        public RequestElement(String plcConnectionString, String plcField, long scanIntervalMs, String uuid) {
+        RequestElement(String plcConnectionString, String plcField, long scanIntervalMs, String uuid) {
             this.plcConnectionString = plcConnectionString;
             this.plcField = plcField;
-            this.submitted = false;
-            this.submitTimeOut = LocalDateTime.now().plus(scanIntervalMs, ChronoUnit.MILLIS);
             this.uuid = uuid;
             this.scanIntervalMs = scanIntervalMs;
             //set initial acquirement to a long time ago
             this.lastAcquirement = LocalDateTime.of(1,1,1,1,1,1);
         }
 
-        public String getPlcConnectionString() {
+        String getPlcConnectionString() {
             return plcConnectionString;
         }
 
-        public String getPlcField() {
+        String getPlcField() {
             return plcField;
         }
 
-        public LocalDateTime getSubmitTimeOut() {
-            return submitTimeOut;
-        }
-
-        public boolean isSubmitted() {
-            return submitted;
-        }
-
-        public void setSubmitted(boolean submitted) {
-            this.submitted = submitted;
-        }
-
         public Object getResult() {
             return result;
         }
@@ -289,31 +273,23 @@ public class TriggerCollectorImpl implements TriggerCollector {
             this.result = result;
         }
 
-        public String getUuid() {
+        String getUuid() {
             return uuid;
         }
 
-        public CompletableFuture<PlcReadResponse> getPlcReadResponse() {
-            return plcReadResponse;
-        }
-
-        public void setPlcReadResponse(CompletableFuture<PlcReadResponse> plcReadResponse) {
-            this.plcReadResponse = plcReadResponse;
-        }
-
-        public long getScanIntervalMs() {
+        long getScanIntervalMs() {
             return scanIntervalMs;
         }
 
-        public void setScanIntervalMs(long scanIntervalMs) {
+        void setScanIntervalMs(long scanIntervalMs) {
             this.scanIntervalMs = scanIntervalMs;
         }
 
-        public LocalDateTime getLastAcquirement() {
+        LocalDateTime getLastAcquirement() {
             return lastAcquirement;
         }
 
-        public void setLastAcquirement(LocalDateTime lastAcquirement) {
+        void setLastAcquirement(LocalDateTime lastAcquirement) {
             this.lastAcquirement = lastAcquirement;
         }
 
@@ -330,6 +306,18 @@ public class TriggerCollectorImpl implements TriggerCollector {
         public int hashCode() {
             return Objects.hash(plcConnectionString, plcField);
         }
+
+        @Override
+        public String toString() {
+            return "RequestElement{" +
+                "plcConnectionString='" + plcConnectionString + '\'' +
+                ", plcField='" + plcField + '\'' +
+                ", lastAcquirement=" + lastAcquirement +
+                ", result=" + result +
+                ", uuid='" + uuid + '\'' +
+                ", scanIntervalMs=" + scanIntervalMs +
+                '}';
+        }
     }
 
 }
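
A hedged lifecycle sketch for the reworked TriggerCollectorImpl (the sleep and the chosen values are illustrative; the 1000/2000 ms and 10/20 thread values mirror the defaults visible in this diff):

    import org.apache.plc4x.java.PlcDriverManager;
    import org.apache.plc4x.java.scraper.triggeredscraper.triggerhandler.collector.TriggerCollector;
    import org.apache.plc4x.java.scraper.triggeredscraper.triggerhandler.collector.TriggerCollectorImpl;

    public class TriggerCollectorLifecycleSketch {
        public static void main(String[] args) throws Exception {
            PlcDriverManager plcDriverManager = new PlcDriverManager();
            // schedulerInterval 1000 ms, futureTimeout 2000 ms, 10 scheduler / 20 executor threads
            TriggerCollector triggerCollector =
                new TriggerCollectorImpl(plcDriverManager, 1000, 2000, 10, 20);
            triggerCollector.start();  // schedules processActiveTrigger() at a fixed rate
            Thread.sleep(10_000);      // let the collector acquire trigger values for a while
            triggerCollector.stop();   // now shuts down both thread pools (new in this commit)
        }
    }
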
diff --git a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperConfigurationTest.java b/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperConfigurationTest.java
index a153b1c..11f4c35 100644
--- a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperConfigurationTest.java
+++ b/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperConfigurationTest.java
@@ -24,8 +24,9 @@ import com.fasterxml.jackson.databind.exc.MismatchedInputException;
 import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
 import org.apache.plc4x.java.api.exceptions.PlcRuntimeException;
 import org.apache.plc4x.java.scraper.config.JobConfiguration;
-import org.apache.plc4x.java.scraper.config.JobConfigurationImpl;
+import org.apache.plc4x.java.scraper.config.JobConfigurationClassicImpl;
 import org.apache.plc4x.java.scraper.config.ScraperConfiguration;
+import org.apache.plc4x.java.scraper.config.ScraperConfigurationClassicImpl;
 import org.apache.plc4x.java.scraper.exception.ScraperException;
 import org.assertj.core.api.WithAssertions;
 import org.junit.jupiter.api.Nested;
@@ -58,7 +59,7 @@ class ScraperConfigurationTest implements WithAssertions {
                         "        a: DBasdf\n" +
                         "        b: DBbsdf\n";
 
-        ScraperConfiguration configuration = mapper.readValue(yaml, ScraperConfiguration.class);
+        ScraperConfiguration configuration = mapper.readValue(yaml, ScraperConfigurationClassicImpl.class);
 
         assertThat(configuration.getJobConfigurations()).hasSize(1);
         JobConfiguration conf = configuration.getJobConfigurations().get(0);
@@ -71,7 +72,7 @@ class ScraperConfigurationTest implements WithAssertions {
             .containsEntry("a3", "b");
 
         assertThat(conf.getName()).isEqualTo("job1");
-        assertThat(((JobConfigurationImpl)conf).getScrapeRate()).isEqualTo(10);
+        assertThat(((JobConfigurationClassicImpl)conf).getScrapeRate()).isEqualTo(10);
         assertThat(conf.getSources())
             .hasSize(3);
 
@@ -91,7 +92,7 @@ class ScraperConfigurationTest implements WithAssertions {
                         "      sources:\n" +
                         "        - a1\n";
 
-        assertThatThrownBy(() -> mapper.readValue(jobs, ScraperConfiguration.class))
+        assertThatThrownBy(() -> mapper.readValue(jobs, ScraperConfigurationClassicImpl.class))
             .isInstanceOf(MismatchedInputException.class);
     }
 
@@ -112,7 +113,7 @@ class ScraperConfigurationTest implements WithAssertions {
                         "      a: DBasdf\n" +
                         "      b: DBbsdf\n";
 
-        assertThatCode(() -> ScraperConfiguration.fromYaml(yaml))
+        assertThatCode(() -> ScraperConfiguration.fromYaml(yaml, ScraperConfigurationClassicImpl.class))
             .doesNotThrowAnyException();
     }
 
@@ -141,7 +142,7 @@ class ScraperConfigurationTest implements WithAssertions {
                         "    ]\n" +
                         "}";
 
-        assertThatCode(() -> ScraperConfiguration.fromJson(json))
+        assertThatCode(() -> ScraperConfiguration.fromJson(json, ScraperConfigurationClassicImpl.class))
             .doesNotThrowAnyException();
     }
 
@@ -156,7 +157,7 @@ class ScraperConfigurationTest implements WithAssertions {
                         "      - s1\n" +
                         "    fields:\n";
 
-        assertThatThrownBy(() -> ScraperConfiguration.fromYaml(yaml))
+        assertThatThrownBy(() -> ScraperConfiguration.fromYaml(yaml, ScraperConfigurationClassicImpl.class))
             .isInstanceOf(PlcRuntimeException.class)
             .hasStackTraceContaining("unreferenced sources: [s1]");
     }
@@ -173,7 +174,7 @@ class ScraperConfigurationTest implements WithAssertions {
                         "    fields:\n" +
                         "      field1: 'DB1 Field 1'\n";
 
-        List<ScrapeJob> jobs = ScraperConfiguration.fromYaml(yaml).getJobs();
+        List<ScrapeJob> jobs = ScraperConfiguration.fromYaml(yaml, ScraperConfigurationClassicImpl.class).getJobs();
         assertThat(jobs).hasSize(1);
 
         ScrapeJob job = jobs.get(0);
@@ -193,12 +194,12 @@ class ScraperConfigurationTest implements WithAssertions {
 
         @Test
         void json() throws IOException {
-            ScraperConfiguration conf = ScraperConfiguration.fromFile("src/test/resources/config.json");
+            ScraperConfiguration conf = ScraperConfiguration.fromFile("src/test/resources/config.json", ScraperConfigurationClassicImpl.class);
         }
 
         @Test
         void yaml() throws IOException {
-            ScraperConfiguration conf = ScraperConfiguration.fromFile("src/test/resources/config.yml");
+            ScraperConfiguration conf = ScraperConfiguration.fromFile("src/test/resources/config.yml", ScraperConfigurationClassicImpl.class);
         }
     }
 }
\ No newline at end of file
diff --git a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperRunner.java b/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperRunner.java
index f3548bc..3a6f543 100644
--- a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperRunner.java
+++ b/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/ScraperRunner.java
@@ -20,6 +20,7 @@
 package org.apache.plc4x.java.scraper;
 
 import org.apache.plc4x.java.scraper.config.ScraperConfiguration;
+import org.apache.plc4x.java.scraper.config.ScraperConfigurationClassicImpl;
 import org.apache.plc4x.java.scraper.exception.ScraperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -31,7 +32,7 @@ public class ScraperRunner {
     private static final Logger LOGGER = LoggerFactory.getLogger(ScraperRunner.class);
 
     public static void main(String[] args) throws IOException, ScraperException {
-        ScraperConfiguration configuration = ScraperConfiguration.fromFile("plc4j/utils/scraper/src/test/resources/example.yml");
+        ScraperConfiguration configuration = ScraperConfiguration.fromFile("plc4j/utils/scraper/src/test/resources/example.yml", ScraperConfigurationClassicImpl.class);
         Scraper scraper = new ScraperImpl(configuration, (j, a, m) -> LOGGER.info("Results from {}/{}: {}", j, a, m));
 
         scraper.start();
diff --git a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/TriggeredScraperRunner.java b/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/TriggeredScraperRunner.java
index 5c22893..545b4aa 100644
--- a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/TriggeredScraperRunner.java
+++ b/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/TriggeredScraperRunner.java
@@ -20,7 +20,8 @@
 package org.apache.plc4x.java.scraper;
 
 import org.apache.plc4x.java.PlcDriverManager;
-import org.apache.plc4x.java.scraper.config.triggeredscraper.TriggeredScraperConfiguration;
+import org.apache.plc4x.java.scraper.config.ScraperConfiguration;
+import org.apache.plc4x.java.scraper.config.triggeredscraper.ScraperConfigurationTriggeredImpl;
 import org.apache.plc4x.java.scraper.exception.ScraperException;
 
 
@@ -41,7 +42,7 @@ public class TriggeredScraperRunner {
      * testing of TriggeredScraper vs real device
      */
     public static void main(String[] args) throws IOException, ScraperException {
-        TriggeredScraperConfiguration configuration = TriggeredScraperConfiguration.fromFile("plc4j/utils/scraper/src/test/resources/example_triggered_scraper.yml");
+        ScraperConfiguration configuration = ScraperConfiguration.fromFile("plc4j/utils/scraper/src/test/resources/example_triggered_scraper.yml", ScraperConfigurationTriggeredImpl.class);
 
         PlcDriverManager plcDriverManager = new PooledPlcDriverManager();
         TriggerCollector triggerCollector = new TriggerCollectorImpl(plcDriverManager);
diff --git a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/TriggeredScraperRunnerModbus.java b/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/TriggeredScraperRunnerModbus.java
index 345b3b8..fd3fe3a 100644
--- a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/TriggeredScraperRunnerModbus.java
+++ b/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/TriggeredScraperRunnerModbus.java
@@ -21,7 +21,8 @@ package org.apache.plc4x.java.scraper;
 
 import org.apache.plc4x.java.PlcDriverManager;
 import org.apache.plc4x.java.modbus.connection.ModbusConnectionFactory;
-import org.apache.plc4x.java.scraper.config.triggeredscraper.TriggeredScraperConfiguration;
+import org.apache.plc4x.java.scraper.config.ScraperConfiguration;
+import org.apache.plc4x.java.scraper.config.triggeredscraper.ScraperConfigurationTriggeredImpl;
 import org.apache.plc4x.java.scraper.exception.ScraperException;
 import org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScraperImpl;
 import org.apache.plc4x.java.scraper.triggeredscraper.triggerhandler.collector.TriggerCollectorImpl;
@@ -41,7 +42,7 @@ public class TriggeredScraperRunnerModbus {
      * testing of TriggeredScraper vs real device (Modbus)
      */
     public static void main(String[] args) throws IOException, ScraperException {
-        TriggeredScraperConfiguration configuration = TriggeredScraperConfiguration.fromFile("plc4j/utils/scraper/src/test/resources/example_triggered_scraper_modbus.yml");
+        ScraperConfiguration configuration = ScraperConfiguration.fromFile("plc4j/utils/scraper/src/test/resources/example_triggered_scraper_modbus.yml", ScraperConfigurationTriggeredImpl.class);
         PlcDriverManager plcDriverManager = new PooledPlcDriverManager();
         TriggeredScraperImpl scraper = new TriggeredScraperImpl(
             configuration,
diff --git a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/config/ScraperConfigurationBuilderTest.java b/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/config/ScraperConfigurationBuilderTest.java
index 71b16a8..5ce58eb 100644
--- a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/config/ScraperConfigurationBuilderTest.java
+++ b/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/config/ScraperConfigurationBuilderTest.java
@@ -23,8 +23,6 @@ import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
 import org.assertj.core.api.WithAssertions;
-import org.junit.Ignore;
-import org.junit.jupiter.api.Test;
 
 import java.util.Arrays;
 import java.util.List;
@@ -33,18 +31,18 @@ class ScraperConfigurationBuilderTest implements WithAssertions {
 
     //ToDo test is failing idon't know why (Tim)
     void builder_usage_example() throws JsonProcessingException {
-        ScraperConfigurationBuilder builder = new ScraperConfigurationBuilder();
+        ScraperConfigurationClassicImplBuilder builder = new ScraperConfigurationClassicImplBuilder();
         List<String> sources = Arrays.asList("s1", "s2");
         List<String> jobs = Arrays.asList("j1", "j2");
 
         sources.forEach(source -> builder.addSource(source, source));
         for (String job : jobs) {
-            JobConfigurationImplBuilder jobConfigurationImplBuilder = builder.job(job, 10);
-            sources.forEach(jobConfigurationImplBuilder::source);
+            JobConfigurationClassicImplBuilder jobConfigurationClassicImplBuilder = builder.job(job, 10);
+            sources.forEach(jobConfigurationClassicImplBuilder::source);
             for (int i = 1; i <= 10; i++) {
-                jobConfigurationImplBuilder.field("f" + i, "qry" + i);
+                jobConfigurationClassicImplBuilder.field("f" + i, "qry" + i);
             }
-            jobConfigurationImplBuilder.build();
+            jobConfigurationClassicImplBuilder.build();
         }
 
         ScraperConfiguration configuration = builder.build();
diff --git a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/triggeredscraper/TriggeredScraperImplTest.java b/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/triggeredscraper/TriggeredScraperImplTest.java
index c47b938..d1bbefa 100644
--- a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/triggeredscraper/TriggeredScraperImplTest.java
+++ b/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/triggeredscraper/TriggeredScraperImplTest.java
@@ -21,13 +21,13 @@ package org.apache.plc4x.java.scraper.triggeredscraper;
 
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.plc4x.java.PlcDriverManager;
-import org.apache.plc4x.java.api.PlcConnection;
 import org.apache.plc4x.java.api.types.PlcResponseCode;
 import org.apache.plc4x.java.base.messages.items.DefaultBooleanFieldItem;
 import org.apache.plc4x.java.base.messages.items.DefaultLongFieldItem;
 import org.apache.plc4x.java.mock.MockDevice;
 import org.apache.plc4x.java.mock.PlcMockConnection;
-import org.apache.plc4x.java.scraper.config.triggeredscraper.TriggeredScraperConfiguration;
+import org.apache.plc4x.java.scraper.config.ScraperConfiguration;
+import org.apache.plc4x.java.scraper.config.ScraperConfigurationClassicImpl;
 import org.apache.plc4x.java.scraper.exception.ScraperException;
 import org.apache.plc4x.java.scraper.triggeredscraper.triggerhandler.collector.TriggerCollector;
 import org.apache.plc4x.java.scraper.triggeredscraper.triggerhandler.collector.TriggerCollectorImpl;
@@ -42,7 +42,6 @@ import org.slf4j.LoggerFactory;
 import java.io.IOException;
 import java.util.Random;
 
-import static org.junit.Assert.*;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.verify;
@@ -106,7 +105,7 @@ public class TriggeredScraperImplTest {
         when(mockDevice1.read(eq("%DB810:DBW0:INT"))).thenReturn(Pair.of(PlcResponseCode.OK, new DefaultLongFieldItem(3L)));
         when(mockDevice2.read(eq("%DB810:DBW0:INT"))).thenReturn(Pair.of(PlcResponseCode.OK, new DefaultLongFieldItem(4L)));
 
-        TriggeredScraperConfiguration configuration = TriggeredScraperConfiguration.fromFile("src/test/resources/mock-scraper-config.yml");
+        ScraperConfiguration configuration = ScraperConfiguration.fromFile("src/test/resources/mock-scraper-config.yml", ScraperConfigurationClassicImpl.class);
         TriggerCollector triggerCollector = new TriggerCollectorImpl(driverManager);
         TriggeredScraperImpl scraper = new TriggeredScraperImpl((j, a, m) -> System.out.println(String.format("Results from %s/%s: %s", j, a, m)), driverManager, configuration.getJobs(),triggerCollector,1000);
 
diff --git a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/triggeredscraper/triggerhandler/TriggerConfigurationTest.java b/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/triggeredscraper/triggerhandler/TriggerConfigurationTest.java
index 68402a9..f2c1810 100644
--- a/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/triggeredscraper/triggerhandler/TriggerConfigurationTest.java
+++ b/plc4j/utils/scraper/src/test/java/org/apache/plc4x/java/scraper/triggeredscraper/triggerhandler/TriggerConfigurationTest.java
@@ -19,6 +19,7 @@
 
 package org.apache.plc4x.java.scraper.triggeredscraper.triggerhandler;
 
+import org.apache.plc4x.java.scraper.exception.ScraperConfigurationException;
 import org.apache.plc4x.java.scraper.exception.ScraperException;
 import org.apache.plc4x.java.scraper.triggeredscraper.TriggeredScrapeJobImpl;
 import org.apache.plc4x.test.FastTests;
@@ -106,7 +107,7 @@ class TriggerConfigurationTest {
         TriggerConfiguration triggerConfiguration = null;
         try {
             triggerConfiguration = TriggerConfiguration.createConfiguration(triggerConfig,triggeredScrapeJob);
-        } catch (ScraperException e) {
+        } catch (ScraperConfigurationException e) {
             //should not happen
         }
 
@@ -139,7 +140,7 @@ class TriggerConfigurationTest {
             triggerConfiguration = TriggerConfiguration.createConfiguration(triggerConfig,triggeredScrapeJob);
             assertThat(triggerConfiguration,null);
             //NPE should happen when test fails!
-        } catch (ScraperException e) {
+        } catch (ScraperConfigurationException e) {
             LOGGER.info("Exception as expected for positive test result: {}",e.getMessage());
             //should happen
         }