You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@camel.apache.org by ac...@apache.org on 2020/02/03 08:21:04 UTC

[camel] branch master updated (61f37d8 -> a65dc8c)

This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git.


    from 61f37d8  Fixed camel-weka syntax. The endpoint should be singleton.
     new f31ac40  Camel-Weka: Should use log4j2
     new 36e79ae  Camel-Weka: First batch of CS fixes
     new a65dc8c  Camel-Weka: Second part of CS fixes

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 components/camel-weka/pom.xml                      | 14 +++-
 .../apache/camel/component/weka/WekaComponent.java |  4 +-
 .../camel/component/weka/WekaConfiguration.java    |  4 +-
 .../apache/camel/component/weka/WekaEndpoint.java  |  6 +-
 .../apache/camel/component/weka/WekaProducer.java  | 85 +++++++++++-----------
 .../camel/component/weka/WekaTypeConverters.java   | 14 ++--
 .../apache/camel/component/weka/FilterTest.java    | 81 ++++++++++-----------
 .../apache/camel/component/weka/ReadWriteTest.java | 78 +++++++++-----------
 .../camel-weka/src/test/resources/log4j.properties | 21 ------
 .../src/test/resources/log4j2.properties           |  2 +-
 10 files changed, 145 insertions(+), 164 deletions(-)
 delete mode 100644 components/camel-weka/src/test/resources/log4j.properties
 copy components/{camel-coap => camel-weka}/src/test/resources/log4j2.properties (96%)


[camel] 02/03: Camel-Weka: First batch of CS fixes

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit 36e79ae5f4b45038fe5f45cd62611bc4d6f87fe6
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Mon Feb 3 09:18:00 2020 +0100

    Camel-Weka: First batch of CS fixes
---
 .../apache/camel/component/weka/WekaComponent.java |  4 +-
 .../camel/component/weka/WekaConfiguration.java    |  4 +-
 .../apache/camel/component/weka/WekaEndpoint.java  |  5 +-
 .../apache/camel/component/weka/WekaProducer.java  | 76 +++++++++++-----------
 .../camel/component/weka/WekaTypeConverters.java   |  6 +-
 .../apache/camel/component/weka/FilterTest.java    | 76 +++++++++++-----------
 .../apache/camel/component/weka/ReadWriteTest.java | 75 ++++++++++-----------
 7 files changed, 120 insertions(+), 126 deletions(-)

diff --git a/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaComponent.java b/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaComponent.java
index 57405a1..acdecc7 100644
--- a/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaComponent.java
+++ b/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaComponent.java
@@ -32,10 +32,10 @@ public class WekaComponent extends DefaultComponent {
         WekaConfiguration config = new WekaConfiguration();
         WekaEndpoint endpoint = new WekaEndpoint(urispec, this, config);
         setProperties(endpoint, params);
-        
+
         Command command = Command.valueOf(remaining);
         config.setCommand(command);
-        
+
         return endpoint;
     }
 }
diff --git a/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaConfiguration.java b/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaConfiguration.java
index 39046b8..7aea74d 100644
--- a/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaConfiguration.java
+++ b/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaConfiguration.java
@@ -25,7 +25,7 @@ public class WekaConfiguration {
 
     // Available commands
     public enum Command {
-        filter, read, write, version 
+        filter, read, write, version
     }
 
     @UriPath(description = "The filter command")
@@ -38,7 +38,7 @@ public class WekaConfiguration {
     private String path;
 
     private Command command;
-    
+
     Command getCommand() {
         return command;
     }
diff --git a/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaEndpoint.java b/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaEndpoint.java
index 3685ae5..251cbd1 100644
--- a/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaEndpoint.java
+++ b/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaEndpoint.java
@@ -30,12 +30,11 @@ import weka.core.Version;
 /**
  * The camel-weka component provides Data Mining functionality through Weka.
  */
-@UriEndpoint(firstVersion = "3.1.0", scheme = "weka", title = "Weka",
-        syntax = "weka:cmd", producerOnly = true, label = "Datamining")
+@UriEndpoint(firstVersion = "3.1.0", scheme = "weka", title = "Weka", syntax = "weka:cmd", producerOnly = true, label = "Datamining")
 public class WekaEndpoint extends DefaultEndpoint {
 
     static final Logger LOG = LoggerFactory.getLogger(WekaEndpoint.class);
-    
+
     @UriParam
     private final WekaConfiguration configuration;
 
diff --git a/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaProducer.java b/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaProducer.java
index f8e9493..c99f5ac 100644
--- a/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaProducer.java
+++ b/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaProducer.java
@@ -61,7 +61,7 @@ public class WekaProducer extends DefaultProducer {
 
         WekaEndpoint endpoint = getEndpoint();
         Command cmd = getConfiguration().getCommand();
-        
+
         if (Command.version == cmd) {
 
             Message msg = exchange.getMessage();
@@ -81,41 +81,41 @@ public class WekaProducer extends DefaultProducer {
 
             Message msg = exchange.getMessage();
             msg.setBody(handleFilterCmd(exchange));
-            
+
         }
     }
 
     private Dataset handleReadCmd(Exchange exchange) throws Exception {
-        
+
         String fpath = getConfiguration().getPath();
-        
+
         if (fpath != null) {
             Dataset dataset = Dataset.create(fpath);
             return dataset;
         }
-        
+
         Dataset dataset = assertDatasetBody(exchange);
         return dataset;
     }
 
     private Object handleWriteCmd(Exchange exchange) throws Exception {
-        
+
         Dataset dataset = assertDatasetBody(exchange);
         String fpath = getConfiguration().getPath();
-        
+
         if (fpath != null) {
-            
+
             dataset.write(Paths.get(fpath));
             return dataset;
-            
+
         } else {
-            
-            // The internal implementation of DataSink does this.. 
+
+            // The internal implementation of DataSink does this..
             // Instances.toString().getBytes()
             //
             // Therefore, we avoid creating yet another copy of the
             // instance data and call Instances.toString() as well
-            
+
             Instances instances = dataset.getInstances();
             byte[] bytes = instances.toString().getBytes();
             return new ByteArrayInputStream(bytes);
@@ -123,63 +123,63 @@ public class WekaProducer extends DefaultProducer {
     }
 
     private Dataset handleFilterCmd(Exchange exchange) throws Exception {
-        
+
         String applyValue = getConfiguration().getApply();
 
         Dataset dataset = assertDatasetBody(exchange);
         dataset = dataset.apply(applyValue);
-        
+
         return dataset;
     }
 
     private Dataset assertDatasetBody(Exchange exchange) throws Exception {
-        
+
         Message msg = exchange.getMessage();
         Object body = msg.getBody();
-        
+
         Dataset dataset = msg.getBody(Dataset.class);
-        
+
         if (dataset == null) {
-            
+
             if (body instanceof Instances) {
 
-                dataset = Dataset.create((Instances) body);
-                
+                dataset = Dataset.create((Instances)body);
+
             } else if (body instanceof GenericFile) {
-                
-                GenericFile<?> file = (GenericFile<?>) body;
+
+                GenericFile<?> file = (GenericFile<?>)body;
                 AssertState.isFalse(file.isDirectory(), "Directory not supported: " + file);
                 String absolutePath = file.getAbsoluteFilePath();
                 dataset = Dataset.create(absolutePath);
-                
+
             } else if (body instanceof URL) {
-                
-                URL url = (URL) body;
+
+                URL url = (URL)body;
                 Instances instances = readInternal(url.openStream());
                 dataset = Dataset.create(instances);
-                
+
             } else if (body instanceof InputStream) {
-                
-                InputStream input = (InputStream) body;
+
+                InputStream input = (InputStream)body;
                 Instances instances = readInternal(input);
                 dataset = Dataset.create(instances);
             }
         }
-        
+
         AssertState.notNull(dataset, "Cannot obtain dataset from body: " + body);
         return dataset;
     }
 
     // https://github.com/tdiesler/nessus-weka/issues/11
     private static Instances readInternal(InputStream input) {
-        
+
         Instances instances = null;
-        
+
         try {
-            
+
             if (input.markSupported())
                 input.mark(10240);
-            
+
             // First try .arff
             try {
                 Loader loader = new ArffLoader();
@@ -195,13 +195,13 @@ public class WekaProducer extends DefaultProducer {
                     input.reset();
                 }
             }
-            
+
             // Next try .csv
             if (instances == null) {
 
                 ByteArrayOutputStream baos = new ByteArrayOutputStream();
                 BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(baos));
-                
+
                 try (BufferedReader br = new BufferedReader(new InputStreamReader(input))) {
                     String line = br.readLine();
                     while (line != null) {
@@ -215,17 +215,17 @@ public class WekaProducer extends DefaultProducer {
                 }
 
                 input = new ByteArrayInputStream(baos.toByteArray());
-                
+
                 Loader loader = new CSVLoader();
                 loader.setSource(input);
                 loader.getStructure();
                 instances = loader.getDataSet();
             }
-            
+
         } catch (Exception ex) {
             throw UncheckedException.create(ex);
         }
-        
+
         return instances;
     }
 }
diff --git a/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaTypeConverters.java b/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaTypeConverters.java
index 30f7ea1..4004209 100644
--- a/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaTypeConverters.java
+++ b/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaTypeConverters.java
@@ -28,14 +28,14 @@ import weka.core.Instances;
 public class WekaTypeConverters {
 
     @Converter
-    public static InputStream toInputStream(Dataset dataset)  {
+    public static InputStream toInputStream(Dataset dataset) {
         Instances instances = dataset.getInstances();
         return toInputStream(instances);
     }
 
     @Converter
-    public static InputStream toInputStream(Instances instances)  {
+    public static InputStream toInputStream(Instances instances) {
         byte[] bytes = instances.toString().getBytes();
         return new ByteArrayInputStream(bytes);
     }
- }
+}
diff --git a/components/camel-weka/src/test/java/org/apache/camel/component/weka/FilterTest.java b/components/camel-weka/src/test/java/org/apache/camel/component/weka/FilterTest.java
index c0fe527..f48c48d 100644
--- a/components/camel-weka/src/test/java/org/apache/camel/component/weka/FilterTest.java
+++ b/components/camel-weka/src/test/java/org/apache/camel/component/weka/FilterTest.java
@@ -37,35 +37,35 @@ public class FilterTest {
     public void readFromFileFilterAndWrite() throws Exception {
 
         try (CamelContext camelctx = new DefaultCamelContext()) {
-            
+
             camelctx.addRoutes(new RouteBuilder() {
-                
+
                 @Override
                 public void configure() throws Exception {
-                    
+
                     // Use the file component to read the CSV file
                     from("file:src/test/resources/data?fileName=sfny.csv&noop=true")
-                    
-                    // Convert the 'in_sf' attribute to nominal
-                    .to("weka:filter?apply=NumericToNominal -R first")
-                    
-                    // Move the 'in_sf' attribute to the end
-                    .to("weka:filter?apply=Reorder -R 2-last,1")
-                    
-                    // Rename the relation
-                    .to("weka:filter?apply=RenameRelation -modify sfny")
-                    
-                    // Use the file component to write the Arff file
-                    .to("file:target/data?fileName=sfny.arff")
-                    
-                    .to("direct:end");
+
+                        // Convert the 'in_sf' attribute to nominal
+                        .to("weka:filter?apply=NumericToNominal -R first")
+
+                        // Move the 'in_sf' attribute to the end
+                        .to("weka:filter?apply=Reorder -R 2-last,1")
+
+                        // Rename the relation
+                        .to("weka:filter?apply=RenameRelation -modify sfny")
+
+                        // Use the file component to write the Arff file
+                        .to("file:target/data?fileName=sfny.arff")
+
+                        .to("direct:end");
                 }
             });
             camelctx.start();
-            
+
             ConsumerTemplate consumer = camelctx.createConsumerTemplate();
             consumer.receiveBody("direct:end");
-            
+
             Path inpath = Paths.get("target/data/sfny.arff");
             Instances instances = DatasetUtils.read(inpath);
             Assert.assertEquals("sfny", instances.relationName());
@@ -76,28 +76,28 @@ public class FilterTest {
     public void readWithWekaFilterAndWrite() throws Exception {
 
         try (CamelContext camelctx = new DefaultCamelContext()) {
-            
+
             camelctx.addRoutes(new RouteBuilder() {
-                
+
                 @Override
                 public void configure() throws Exception {
-                    
+
                     from("direct:start")
-                    
-                    // Use Weka to read the CSV file
-                    .to("weka:read?path=src/test/resources/data/sfny.csv")
-                    
-                    // Convert the 'in_sf' attribute to nominal
-                    .to("weka:filter?apply=NumericToNominal -R first")
-                    
-                    // Move the 'in_sf' attribute to the end
-                    .to("weka:filter?apply=Reorder -R 2-last,1")
-                    
-                    // Rename the relation
-                    .to("weka:filter?apply=RenameRelation -modify sfny")
-                    
-                    // Use Weka to write the Arff file
-                    .to("weka:write?path=target/data/sfny.arff");
+
+                        // Use Weka to read the CSV file
+                        .to("weka:read?path=src/test/resources/data/sfny.csv")
+
+                        // Convert the 'in_sf' attribute to nominal
+                        .to("weka:filter?apply=NumericToNominal -R first")
+
+                        // Move the 'in_sf' attribute to the end
+                        .to("weka:filter?apply=Reorder -R 2-last,1")
+
+                        // Rename the relation
+                        .to("weka:filter?apply=RenameRelation -modify sfny")
+
+                        // Use Weka to write the Arff file
+                        .to("weka:write?path=target/data/sfny.arff");
                 }
             });
             camelctx.start();
@@ -105,7 +105,7 @@ public class FilterTest {
             ProducerTemplate producer = camelctx.createProducerTemplate();
             Dataset dataset = producer.requestBody("direct:start", null, Dataset.class);
             Assert.assertEquals("sfny", dataset.getInstances().relationName());
-            
+
             dataset = Dataset.create("target/data/sfny.arff");
             Assert.assertEquals("sfny", dataset.getInstances().relationName());
         }
diff --git a/components/camel-weka/src/test/java/org/apache/camel/component/weka/ReadWriteTest.java b/components/camel-weka/src/test/java/org/apache/camel/component/weka/ReadWriteTest.java
index d50159e..eb64406 100644
--- a/components/camel-weka/src/test/java/org/apache/camel/component/weka/ReadWriteTest.java
+++ b/components/camel-weka/src/test/java/org/apache/camel/component/weka/ReadWriteTest.java
@@ -37,7 +37,7 @@ public class ReadWriteTest {
     public void wekaVersion() throws Exception {
 
         try (CamelContext camelctx = new DefaultCamelContext()) {
-            
+
             camelctx.addRoutes(new RouteBuilder() {
                 @Override
                 public void configure() throws Exception {
@@ -45,38 +45,37 @@ public class ReadWriteTest {
                 }
             });
             camelctx.start();
-            
+
             ProducerTemplate producer = camelctx.createProducerTemplate();
             String res = producer.requestBody("direct:start", null, String.class);
             Assert.assertTrue(res.startsWith("3.8"));
         }
     }
-    
+
     @Test
     public void readCsvFile() throws Exception {
 
         try (CamelContext camelctx = new DefaultCamelContext()) {
-            
+
             camelctx.addRoutes(new RouteBuilder() {
                 @Override
                 public void configure() throws Exception {
-                    from("file:src/test/resources/data?fileName=sfny.csv&noop=true")
-                        .to("weka:read").to("direct:end");
+                    from("file:src/test/resources/data?fileName=sfny.csv&noop=true").to("weka:read").to("direct:end");
                 }
             });
             camelctx.start();
-            
+
             ConsumerTemplate consumer = camelctx.createConsumerTemplate();
             Dataset dataset = consumer.receiveBody("direct:end", Dataset.class);
             Assert.assertNotNull(dataset);
         }
     }
-    
+
     @Test
     public void readCsvUrl() throws Exception {
 
         try (CamelContext camelctx = new DefaultCamelContext()) {
-            
+
             camelctx.addRoutes(new RouteBuilder() {
                 @Override
                 public void configure() throws Exception {
@@ -84,21 +83,21 @@ public class ReadWriteTest {
                 }
             });
             camelctx.start();
-            
+
             Path absPath = Paths.get("src/test/resources/data/sfny.csv").toAbsolutePath();
             URL sourceUrl = absPath.toUri().toURL();
-            
+
             ProducerTemplate producer = camelctx.createProducerTemplate();
             Dataset dataset = producer.requestBody("direct:start", sourceUrl, Dataset.class);
             Assert.assertNotNull(dataset);
         }
     }
-    
+
     @Test
     public void readCsvInputStream() throws Exception {
 
         try (CamelContext camelctx = new DefaultCamelContext()) {
-            
+
             camelctx.addRoutes(new RouteBuilder() {
                 @Override
                 public void configure() throws Exception {
@@ -106,10 +105,10 @@ public class ReadWriteTest {
                 }
             });
             camelctx.start();
-            
+
             Path absPath = Paths.get("src/test/resources/data/sfny.csv").toAbsolutePath();
             InputStream input = absPath.toUri().toURL().openStream();
-            
+
             ProducerTemplate producer = camelctx.createProducerTemplate();
             Dataset dataset = producer.requestBody("direct:start", input, Dataset.class);
             Assert.assertNotNull(dataset);
@@ -120,7 +119,7 @@ public class ReadWriteTest {
     public void readArffWithPath() throws Exception {
 
         try (CamelContext camelctx = new DefaultCamelContext()) {
-            
+
             camelctx.addRoutes(new RouteBuilder() {
                 @Override
                 public void configure() throws Exception {
@@ -128,7 +127,7 @@ public class ReadWriteTest {
                 }
             });
             camelctx.start();
-            
+
             ProducerTemplate producer = camelctx.createProducerTemplate();
             Dataset dataset = producer.requestBody("direct:start", null, Dataset.class);
             Assert.assertNotNull(dataset);
@@ -139,7 +138,7 @@ public class ReadWriteTest {
     public void readArffInputStream() throws Exception {
 
         try (CamelContext camelctx = new DefaultCamelContext()) {
-            
+
             camelctx.addRoutes(new RouteBuilder() {
                 @Override
                 public void configure() throws Exception {
@@ -147,10 +146,10 @@ public class ReadWriteTest {
                 }
             });
             camelctx.start();
-            
+
             Path absPath = Paths.get("src/test/resources/data/sfny.arff").toAbsolutePath();
             InputStream input = absPath.toUri().toURL().openStream();
-            
+
             ProducerTemplate producer = camelctx.createProducerTemplate();
             Dataset dataset = producer.requestBody("direct:start", input, Dataset.class);
             Assert.assertNotNull(dataset);
@@ -161,22 +160,21 @@ public class ReadWriteTest {
     public void writeDatasetWithConversion() throws Exception {
 
         try (CamelContext camelctx = new DefaultCamelContext()) {
-            
+
             camelctx.addRoutes(new RouteBuilder() {
                 @Override
                 public void configure() throws Exception {
-                    from("direct:start")
-                        .to("file:target/data?fileName=sfny.arff");
+                    from("direct:start").to("file:target/data?fileName=sfny.arff");
                 }
             });
             camelctx.start();
-            
+
             Path inpath = Paths.get("src/test/resources/data/sfny.arff");
             Dataset dataset = Dataset.create(inpath);
-            
+
             ProducerTemplate producer = camelctx.createProducerTemplate();
             producer.sendBody("direct:start", dataset);
-            
+
             Path outpath = Paths.get("target/data/sfny.arff");
             dataset = Dataset.create(outpath);
             Assert.assertNotNull(dataset);
@@ -187,23 +185,21 @@ public class ReadWriteTest {
     public void writeDatasetWithoutPath() throws Exception {
 
         try (CamelContext camelctx = new DefaultCamelContext()) {
-            
+
             camelctx.addRoutes(new RouteBuilder() {
                 @Override
                 public void configure() throws Exception {
-                    from("direct:start")
-                        .to("weka:write")
-                        .to("file:target/data?fileName=sfny.arff");
+                    from("direct:start").to("weka:write").to("file:target/data?fileName=sfny.arff");
                 }
             });
             camelctx.start();
-            
+
             Path inpath = Paths.get("src/test/resources/data/sfny.arff");
             Dataset dataset = Dataset.create(inpath);
-            
+
             ProducerTemplate producer = camelctx.createProducerTemplate();
             producer.sendBody("direct:start", dataset);
-            
+
             Path outpath = Paths.get("target/data/sfny.arff");
             dataset = Dataset.create(outpath);
             Assert.assertNotNull(dataset);
@@ -214,26 +210,25 @@ public class ReadWriteTest {
     public void writeDatasetWithPath() throws Exception {
 
         try (CamelContext camelctx = new DefaultCamelContext()) {
-            
+
             camelctx.addRoutes(new RouteBuilder() {
                 @Override
                 public void configure() throws Exception {
-                    from("direct:start")
-                        .to("weka:write?path=target/data/sfny.arff");
+                    from("direct:start").to("weka:write?path=target/data/sfny.arff");
                 }
             });
             camelctx.start();
-            
+
             Path inpath = Paths.get("src/test/resources/data/sfny.arff");
             Dataset dataset = Dataset.create(inpath);
-            
+
             ProducerTemplate producer = camelctx.createProducerTemplate();
             producer.sendBody("direct:start", dataset);
-            
+
             Path outpath = Paths.get("target/data/sfny.arff");
             dataset = Dataset.create(outpath);
             Assert.assertNotNull(dataset);
         }
     }
-    
+
 }


[camel] 01/03: Camel-Weka: Should use log4j2

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit f31ac4044b4e15e352712d17d321642bfb0e5d12
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Mon Feb 3 09:10:50 2020 +0100

    Camel-Weka: Should use log4j2
---
 components/camel-weka/pom.xml                      | 14 +++++++++--
 .../camel-weka/src/test/resources/log4j.properties | 21 ----------------
 .../src/test/resources/log4j2.properties           | 28 ++++++++++++++++++++++
 3 files changed, 40 insertions(+), 23 deletions(-)

diff --git a/components/camel-weka/pom.xml b/components/camel-weka/pom.xml
index d961ba3..b8c7227 100644
--- a/components/camel-weka/pom.xml
+++ b/components/camel-weka/pom.xml
@@ -58,8 +58,18 @@
             <scope>test</scope>
         </dependency>
         <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-log4j12</artifactId>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-api</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-core</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-slf4j-impl</artifactId>
             <scope>test</scope>
         </dependency>
     </dependencies>
diff --git a/components/camel-weka/src/test/resources/log4j.properties b/components/camel-weka/src/test/resources/log4j.properties
deleted file mode 100644
index f24e9f8..0000000
--- a/components/camel-weka/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,21 +0,0 @@
-# Root logger option
-log4j.rootLogger=DEBUG, file, console
-
-# Thirdparty categories
-#log4j.logger.com.foo.bar=ERROR
-
-# Direct log messages to a log file
-log4j.appender.file=org.apache.log4j.RollingFileAppender
-log4j.appender.file.file=target/debug.log
-log4j.appender.file.append=false
-log4j.appender.file.maxFileSize=10MB
-log4j.appender.file.layout=org.apache.log4j.PatternLayout
-log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n
-log4j.appender.file.threshold=DEBUG
- 
-# Direct log messages to console
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.out
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%m%n
-log4j.appender.console.threshold=ERROR
diff --git a/components/camel-weka/src/test/resources/log4j2.properties b/components/camel-weka/src/test/resources/log4j2.properties
new file mode 100644
index 0000000..b13ce9c
--- /dev/null
+++ b/components/camel-weka/src/test/resources/log4j2.properties
@@ -0,0 +1,28 @@
+## ---------------------------------------------------------------------------
+## Licensed to the Apache Software Foundation (ASF) under one or more
+## contributor license agreements.  See the NOTICE file distributed with
+## this work for additional information regarding copyright ownership.
+## The ASF licenses this file to You under the Apache License, Version 2.0
+## (the "License"); you may not use this file except in compliance with
+## the License.  You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+## ---------------------------------------------------------------------------
+
+appender.file.type = File
+appender.file.name = file
+appender.file.fileName = target/camel-weka-test.log
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d [%-15.15t] %-5p %-30.30c{1} - %m%n
+appender.out.type = Console
+appender.out.name = out
+appender.out.layout.type = PatternLayout
+appender.out.layout.pattern = %d [%-15.15t] %-5p %-30.30c{1} - %m%n
+rootLogger.level = INFO
+rootLogger.appenderRef.file.ref = file


[camel] 03/03: Camel-Weka: Second part of CS fixes

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit a65dc8caa966548e390a35b2496bee31651db140
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Mon Feb 3 09:20:37 2020 +0100

    Camel-Weka: Second part of CS fixes
---
 .../java/org/apache/camel/component/weka/WekaEndpoint.java    |  1 -
 .../java/org/apache/camel/component/weka/WekaProducer.java    | 11 +++++------
 .../org/apache/camel/component/weka/WekaTypeConverters.java   |  8 +++++---
 .../test/java/org/apache/camel/component/weka/FilterTest.java |  5 ++---
 .../java/org/apache/camel/component/weka/ReadWriteTest.java   |  3 +--
 5 files changed, 13 insertions(+), 15 deletions(-)

diff --git a/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaEndpoint.java b/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaEndpoint.java
index 251cbd1..bd6e995 100644
--- a/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaEndpoint.java
+++ b/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaEndpoint.java
@@ -24,7 +24,6 @@ import org.apache.camel.spi.UriParam;
 import org.apache.camel.support.DefaultEndpoint;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
 import weka.core.Version;
 
 /**
diff --git a/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaProducer.java b/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaProducer.java
index c99f5ac..e6962bf 100644
--- a/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaProducer.java
+++ b/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaProducer.java
@@ -27,15 +27,14 @@ import java.io.OutputStreamWriter;
 import java.net.URL;
 import java.nio.file.Paths;
 
+import io.nessus.weka.AssertState;
+import io.nessus.weka.Dataset;
+import io.nessus.weka.UncheckedException;
 import org.apache.camel.Exchange;
 import org.apache.camel.Message;
 import org.apache.camel.component.file.GenericFile;
 import org.apache.camel.component.weka.WekaConfiguration.Command;
 import org.apache.camel.support.DefaultProducer;
-
-import io.nessus.weka.AssertState;
-import io.nessus.weka.Dataset;
-import io.nessus.weka.UncheckedException;
 import weka.core.Instances;
 import weka.core.converters.ArffLoader;
 import weka.core.converters.CSVLoader;
@@ -177,9 +176,9 @@ public class WekaProducer extends DefaultProducer {
 
         try {
 
-            if (input.markSupported())
+            if (input.markSupported()) {
                 input.mark(10240);
-
+            }
             // First try .arff
             try {
                 Loader loader = new ArffLoader();
diff --git a/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaTypeConverters.java b/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaTypeConverters.java
index 4004209..c3548cf 100644
--- a/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaTypeConverters.java
+++ b/components/camel-weka/src/main/java/org/apache/camel/component/weka/WekaTypeConverters.java
@@ -19,13 +19,15 @@ package org.apache.camel.component.weka;
 import java.io.ByteArrayInputStream;
 import java.io.InputStream;
 
-import org.apache.camel.Converter;
-
 import io.nessus.weka.Dataset;
+import org.apache.camel.Converter;
 import weka.core.Instances;
 
 @Converter
-public class WekaTypeConverters {
+public final class WekaTypeConverters {
+    
+    private WekaTypeConverters() {
+    }
 
     @Converter
     public static InputStream toInputStream(Dataset dataset) {
diff --git a/components/camel-weka/src/test/java/org/apache/camel/component/weka/FilterTest.java b/components/camel-weka/src/test/java/org/apache/camel/component/weka/FilterTest.java
index f48c48d..2896bd6 100644
--- a/components/camel-weka/src/test/java/org/apache/camel/component/weka/FilterTest.java
+++ b/components/camel-weka/src/test/java/org/apache/camel/component/weka/FilterTest.java
@@ -19,6 +19,8 @@ package org.apache.camel.component.weka;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 
+import io.nessus.weka.Dataset;
+import io.nessus.weka.utils.DatasetUtils;
 import org.apache.camel.CamelContext;
 import org.apache.camel.ConsumerTemplate;
 import org.apache.camel.ProducerTemplate;
@@ -26,9 +28,6 @@ import org.apache.camel.builder.RouteBuilder;
 import org.apache.camel.impl.DefaultCamelContext;
 import org.junit.Assert;
 import org.junit.Test;
-
-import io.nessus.weka.Dataset;
-import io.nessus.weka.utils.DatasetUtils;
 import weka.core.Instances;
 
 public class FilterTest {
diff --git a/components/camel-weka/src/test/java/org/apache/camel/component/weka/ReadWriteTest.java b/components/camel-weka/src/test/java/org/apache/camel/component/weka/ReadWriteTest.java
index eb64406..d98915c 100644
--- a/components/camel-weka/src/test/java/org/apache/camel/component/weka/ReadWriteTest.java
+++ b/components/camel-weka/src/test/java/org/apache/camel/component/weka/ReadWriteTest.java
@@ -21,6 +21,7 @@ import java.net.URL;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 
+import io.nessus.weka.Dataset;
 import org.apache.camel.CamelContext;
 import org.apache.camel.ConsumerTemplate;
 import org.apache.camel.ProducerTemplate;
@@ -29,8 +30,6 @@ import org.apache.camel.impl.DefaultCamelContext;
 import org.junit.Assert;
 import org.junit.Test;
 
-import io.nessus.weka.Dataset;
-
 public class ReadWriteTest {
 
     @Test