Posted to commits@camel.apache.org by ac...@apache.org on 2019/10/25 14:41:21 UTC

[camel] 01/02: Camel-Spark: Use only Spark in the Title

This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit 1dc28db2ddd8fa20643fa800fe7aaea03cfdc690
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Fri Oct 25 16:33:48 2019 +0200

    Camel-Spark: Use only Spark in the Title
---
 .../src/main/java/org/apache/camel/component/spark/SparkEndpoint.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/components/camel-spark/src/main/java/org/apache/camel/component/spark/SparkEndpoint.java b/components/camel-spark/src/main/java/org/apache/camel/component/spark/SparkEndpoint.java
index cbd7f1b..83689a9 100644
--- a/components/camel-spark/src/main/java/org/apache/camel/component/spark/SparkEndpoint.java
+++ b/components/camel-spark/src/main/java/org/apache/camel/component/spark/SparkEndpoint.java
@@ -34,7 +34,7 @@ import static org.slf4j.LoggerFactory.getLogger;
 /**
  * The spark component can be used to send RDD or DataFrame jobs to Apache Spark cluster.
  */
-@UriEndpoint(firstVersion = "2.17.0", scheme = "spark", title = "Apache Spark", syntax = "spark:endpointType",
+@UriEndpoint(firstVersion = "2.17.0", scheme = "spark", title = "Spark", syntax = "spark:endpointType",
         producerOnly = true, label = "bigdata,iot")
 public class SparkEndpoint extends DefaultEndpoint {
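
Note (not part of the commit): the title attribute changed above only affects the component's generated metadata and documentation; endpoints are still addressed through the "spark" scheme with the syntax spark:endpointType. A minimal route sketch of producing to this endpoint is shown below -- the bean names "myRdd" and "myRddCallback" are hypothetical, chosen here purely for illustration and must be bound in the registry.

    import org.apache.camel.builder.RouteBuilder;

    public class SparkRoute extends RouteBuilder {
        @Override
        public void configure() {
            from("direct:sparkJob")
                // endpointType is "rdd" here; "dataframe" and "hive" are the other types.
                // "#myRdd" and "#myRddCallback" are hypothetical registry bean references.
                .to("spark:rdd?rdd=#myRdd&rddCallback=#myRddCallback");
        }
    }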