Posted to commits@camel.apache.org by ac...@apache.org on 2019/10/21 10:43:18 UTC

[camel] branch master updated: Regen again

This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git


The following commit(s) were added to refs/heads/master by this push:
     new 5c0cbcc  Regen again
5c0cbcc is described below

commit 5c0cbccffd403d11daeec631c6088b2312f68913
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Mon Oct 21 12:42:55 2019 +0200

    Regen again
---
 .../endpoint/dsl/SparkEndpointBuilderFactory.java  | 246 ++++++++++++++-------
 1 file changed, 164 insertions(+), 82 deletions(-)

diff --git a/core/camel-endpointdsl/src/main/java/org/apache/camel/builder/endpoint/dsl/SparkEndpointBuilderFactory.java b/core/camel-endpointdsl/src/main/java/org/apache/camel/builder/endpoint/dsl/SparkEndpointBuilderFactory.java
index cc17915..7034409 100644
--- a/core/camel-endpointdsl/src/main/java/org/apache/camel/builder/endpoint/dsl/SparkEndpointBuilderFactory.java
+++ b/core/camel-endpointdsl/src/main/java/org/apache/camel/builder/endpoint/dsl/SparkEndpointBuilderFactory.java
@@ -22,8 +22,8 @@ import org.apache.camel.builder.EndpointProducerBuilder;
 import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
 
 /**
- * The spark component can be used to send RDD or DataFrame jobs to Apache Spark
- * cluster.
+ * The spark-rest component is used for hosting REST services that have
+ * been defined using the Camel rest-dsl.
  * 
  * Generated by camel-package-maven-plugin - do not edit this file!
  */
@@ -32,138 +32,166 @@ public interface SparkEndpointBuilderFactory {
 
 
     /**
-     * Builder for endpoint for the Apache Spark component.
+     * Builder for endpoint for the Spark Rest component.
      */
-    public interface SparkEndpointBuilder extends EndpointProducerBuilder {
+    public interface SparkEndpointBuilder extends EndpointConsumerBuilder {
         default AdvancedSparkEndpointBuilder advanced() {
             return (AdvancedSparkEndpointBuilder) this;
         }
         /**
-         * Indicates if results should be collected or counted.
+         * Accept type, such as 'text/xml' or 'application/json'. By default
+         * all kinds of types are accepted.
          * 
-         * The option is a: <code>boolean</code> type.
-         * 
-         * Group: producer
-         */
-        default SparkEndpointBuilder collect(boolean collect) {
-            doSetProperty("collect", collect);
-            return this;
-        }
-        /**
-         * Indicates if results should be collected or counted.
-         * 
-         * The option will be converted to a <code>boolean</code> type.
+         * The option is a: <code>java.lang.String</code> type.
          * 
-         * Group: producer
+         * Group: consumer
          */
-        default SparkEndpointBuilder collect(String collect) {
-            doSetProperty("collect", collect);
+        default SparkEndpointBuilder accept(String accept) {
+            doSetProperty("accept", accept);
             return this;
         }
         /**
-         * DataFrame to compute against.
+         * Determines whether the raw input stream from Spark
+         * HttpRequest#getContent() is cached (Camel will read the stream
+         * into a lightweight, in-memory stream cache). By default Camel
+         * caches the Netty input stream to support reading it multiple
+         * times, ensuring Camel can retrieve all data from the stream.
+         * However, you can set this option to true when you need to access
+         * the raw stream, for example to stream it directly to a file or
+         * other persistent store. Mind that if you enable this option, you
+         * cannot read the Netty stream multiple times out of the box, and
+         * you would need to manually reset the reader index on the Spark
+         * raw stream.
          * 
-         * The option is a:
-         * <code>org.apache.spark.sql.Dataset&lt;org.apache.spark.sql.Row&gt;</code> type.
+         * The option is a: <code>boolean</code> type.
          * 
-         * Group: producer
+         * Group: consumer
          */
-        default SparkEndpointBuilder dataFrame(Object dataFrame) {
-            doSetProperty("dataFrame", dataFrame);
+        default SparkEndpointBuilder disableStreamCache(
+                boolean disableStreamCache) {
+            doSetProperty("disableStreamCache", disableStreamCache);
             return this;
         }
         /**
-         * DataFrame to compute against.
+         * Determines whether the raw input stream from Spark
+         * HttpRequest#getContent() is cached (Camel will read the stream
+         * into a lightweight, in-memory stream cache). By default Camel
+         * caches the Netty input stream to support reading it multiple
+         * times, ensuring Camel can retrieve all data from the stream.
+         * However, you can set this option to true when you need to access
+         * the raw stream, for example to stream it directly to a file or
+         * other persistent store. Mind that if you enable this option, you
+         * cannot read the Netty stream multiple times out of the box, and
+         * you would need to manually reset the reader index on the Spark
+         * raw stream.
          * 
-         * The option will be converted to a
-         * <code>org.apache.spark.sql.Dataset&lt;org.apache.spark.sql.Row&gt;</code> type.
+         * The option will be converted to a <code>boolean</code> type.
          * 
-         * Group: producer
+         * Group: consumer
          */
-        default SparkEndpointBuilder dataFrame(String dataFrame) {
-            doSetProperty("dataFrame", dataFrame);
+        default SparkEndpointBuilder disableStreamCache(
+                String disableStreamCache) {
+            doSetProperty("disableStreamCache", disableStreamCache);
             return this;
         }
         /**
-         * Function performing action against an DataFrame.
+         * If this option is enabled, then during binding from Spark to the
+         * Camel Message the headers will be mapped as well (i.e. added as
+         * headers on the Camel Message). You can turn off this option to
+         * disable the mapping. The headers can still be accessed from the
+         * org.apache.camel.component.sparkrest.SparkMessage message with the
+         * method getRequest(), which returns the Spark HTTP request instance.
          * 
-         * The option is a:
-         * <code>org.apache.camel.component.spark.DataFrameCallback</code> type.
+         * The option is a: <code>boolean</code> type.
          * 
-         * Group: producer
+         * Group: consumer
          */
-        default SparkEndpointBuilder dataFrameCallback(Object dataFrameCallback) {
-            doSetProperty("dataFrameCallback", dataFrameCallback);
+        default SparkEndpointBuilder mapHeaders(boolean mapHeaders) {
+            doSetProperty("mapHeaders", mapHeaders);
             return this;
         }
         /**
-         * Function performing action against an DataFrame.
+         * If this option is enabled, then during binding from Spark to the
+         * Camel Message the headers will be mapped as well (i.e. added as
+         * headers on the Camel Message). You can turn off this option to
+         * disable the mapping. The headers can still be accessed from the
+         * org.apache.camel.component.sparkrest.SparkMessage message with the
+         * method getRequest(), which returns the Spark HTTP request instance.
          * 
-         * The option will be converted to a
-         * <code>org.apache.camel.component.spark.DataFrameCallback</code> type.
+         * The option will be converted to a <code>boolean</code> type.
          * 
-         * Group: producer
+         * Group: consumer
          */
-        default SparkEndpointBuilder dataFrameCallback(String dataFrameCallback) {
-            doSetProperty("dataFrameCallback", dataFrameCallback);
+        default SparkEndpointBuilder mapHeaders(String mapHeaders) {
+            doSetProperty("mapHeaders", mapHeaders);
             return this;
         }
         /**
-         * RDD to compute against.
+         * If enabled, and an Exchange failed processing on the consumer
+         * side, then the caused Exception will be sent back serialized in
+         * the response as an application/x-java-serialized-object content
+         * type. This is turned off by default. If you enable this, be aware
+         * that Java will deserialize the incoming data from the request,
+         * which can be a potential security risk.
          * 
-         * The option is a: <code>org.apache.spark.api.java.JavaRDDLike</code>
-         * type.
+         * The option is a: <code>boolean</code> type.
          * 
-         * Group: producer
+         * Group: consumer
          */
-        default SparkEndpointBuilder rdd(Object rdd) {
-            doSetProperty("rdd", rdd);
+        default SparkEndpointBuilder transferException(boolean transferException) {
+            doSetProperty("transferException", transferException);
             return this;
         }
         /**
-         * RDD to compute against.
+         * If enabled, and an Exchange failed processing on the consumer
+         * side, then the caused Exception will be sent back serialized in
+         * the response as an application/x-java-serialized-object content
+         * type. This is turned off by default. If you enable this, be aware
+         * that Java will deserialize the incoming data from the request,
+         * which can be a potential security risk.
          * 
-         * The option will be converted to a
-         * <code>org.apache.spark.api.java.JavaRDDLike</code> type.
+         * The option will be converted to a <code>boolean</code> type.
          * 
-         * Group: producer
+         * Group: consumer
          */
-        default SparkEndpointBuilder rdd(String rdd) {
-            doSetProperty("rdd", rdd);
+        default SparkEndpointBuilder transferException(String transferException) {
+            doSetProperty("transferException", transferException);
             return this;
         }
         /**
-         * Function performing action against an RDD.
+         * If this option is enabled, then during binding from Spark to the
+         * Camel Message the header values will be URL decoded (e.g. %20
+         * becomes a space character).
          * 
-         * The option is a:
-         * <code>org.apache.camel.component.spark.RddCallback</code> type.
+         * The option is a: <code>boolean</code> type.
          * 
-         * Group: producer
+         * Group: consumer
          */
-        default SparkEndpointBuilder rddCallback(Object rddCallback) {
-            doSetProperty("rddCallback", rddCallback);
+        default SparkEndpointBuilder urlDecodeHeaders(boolean urlDecodeHeaders) {
+            doSetProperty("urlDecodeHeaders", urlDecodeHeaders);
             return this;
         }
         /**
-         * Function performing action against an RDD.
+         * If this option is enabled, then during binding from Spark to the
+         * Camel Message the header values will be URL decoded (e.g. %20
+         * becomes a space character).
          * 
-         * The option will be converted to a
-         * <code>org.apache.camel.component.spark.RddCallback</code> type.
+         * The option will be converted to a <code>boolean</code> type.
          * 
-         * Group: producer
+         * Group: consumer
          */
-        default SparkEndpointBuilder rddCallback(String rddCallback) {
-            doSetProperty("rddCallback", rddCallback);
+        default SparkEndpointBuilder urlDecodeHeaders(String urlDecodeHeaders) {
+            doSetProperty("urlDecodeHeaders", urlDecodeHeaders);
             return this;
         }
     }
 
     /**
-     * Advanced builder for endpoint for the Apache Spark component.
+     * Advanced builder for endpoint for the Spark Rest component.
      */
     public interface AdvancedSparkEndpointBuilder
             extends
-                EndpointProducerBuilder {
+                EndpointConsumerBuilder {
         default SparkEndpointBuilder basic() {
             return (SparkEndpointBuilder) this;
         }
@@ -194,6 +222,56 @@ public interface SparkEndpointBuilderFactory {
             return this;
         }
         /**
+         * Whether or not the consumer should try to find a target consumer by
+         * matching the URI prefix if no exact match is found.
+         * 
+         * The option is a: <code>boolean</code> type.
+         * 
+         * Group: advanced
+         */
+        default AdvancedSparkEndpointBuilder matchOnUriPrefix(
+                boolean matchOnUriPrefix) {
+            doSetProperty("matchOnUriPrefix", matchOnUriPrefix);
+            return this;
+        }
+        /**
+         * Whether or not the consumer should try to find a target consumer by
+         * matching the URI prefix if no exact match is found.
+         * 
+         * The option will be converted to a <code>boolean</code> type.
+         * 
+         * Group: advanced
+         */
+        default AdvancedSparkEndpointBuilder matchOnUriPrefix(
+                String matchOnUriPrefix) {
+            doSetProperty("matchOnUriPrefix", matchOnUriPrefix);
+            return this;
+        }
+        /**
+         * To use a custom SparkBinding to map to/from the Camel message.
+         * 
+         * The option is a:
+         * <code>org.apache.camel.component.sparkrest.SparkBinding</code> type.
+         * 
+         * Group: advanced
+         */
+        default AdvancedSparkEndpointBuilder sparkBinding(Object sparkBinding) {
+            doSetProperty("sparkBinding", sparkBinding);
+            return this;
+        }
+        /**
+         * To use a custom SparkBinding to map to/from the Camel message.
+         * 
+         * The option will be converted to a
+         * <code>org.apache.camel.component.sparkrest.SparkBinding</code> type.
+         * 
+         * Group: advanced
+         */
+        default AdvancedSparkEndpointBuilder sparkBinding(String sparkBinding) {
+            doSetProperty("sparkBinding", sparkBinding);
+            return this;
+        }
+        /**
          * Sets whether synchronous processing should be strictly used, or Camel
          * is allowed to use asynchronous processing (if supported).
          * 
@@ -219,24 +297,28 @@ public interface SparkEndpointBuilderFactory {
         }
     }
     /**
-     * Apache Spark (camel-spark)
-     * The spark component can be used to send RDD or DataFrame jobs to Apache
-     * Spark cluster.
+     * Spark Rest (camel-spark-rest)
+     * The spark-rest component is used for hosting REST services that have
+     * been defined using the Camel rest-dsl.
+     * 
+     * Category: rest
+     * Available as of version: 2.14
+     * Maven coordinates: org.apache.camel:camel-spark-rest
      * 
-     * Category: bigdata,iot
-     * Available as of version: 2.17
-     * Maven coordinates: org.apache.camel:camel-spark
+     * Syntax: <code>spark-rest:verb:path</code>
      * 
-     * Syntax: <code>spark:endpointType</code>
+     * Path parameter: verb (required)
+     * get, post, put, patch, delete, head, trace, connect, or options.
+     * The value can be one of: get, post, put, patch, delete, head, trace,
+     * connect, options
      * 
-     * Path parameter: endpointType (required)
-     * Type of the endpoint (rdd, dataframe, hive).
-     * The value can be one of: rdd, dataframe, hive
+     * Path parameter: path (required)
+     * The content path, which supports Spark syntax.
      */
-    default SparkEndpointBuilder spark(String path) {
+    default SparkEndpointBuilder sparkRest(String path) {
         class SparkEndpointBuilderImpl extends AbstractEndpointBuilder implements SparkEndpointBuilder, AdvancedSparkEndpointBuilder {
             public SparkEndpointBuilderImpl(String path) {
-                super("spark", path);
+                super("spark-rest", path);
             }
         }
         return new SparkEndpointBuilderImpl(path);
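
For reference, a minimal sketch of how the regenerated spark-rest endpoint
DSL might be consumed from a route. This example is not part of the commit:
it assumes a route builder that mixes in the generated endpoint DSL (such as
org.apache.camel.builder.endpoint.EndpointRouteBuilder), and the
"get:hello/{name}" path, option values, and log message are illustrative.

    import org.apache.camel.builder.endpoint.EndpointRouteBuilder;

    public class SparkRestRoute extends EndpointRouteBuilder {
        @Override
        public void configure() {
            // Consume HTTP GET requests on hello/{name}. The fluent option
            // methods below (accept, mapHeaders, urlDecodeHeaders, and the
            // advanced matchOnUriPrefix) are the builder methods generated
            // in this diff.
            from(sparkRest("get:hello/{name}")
                    .accept("application/json")
                    .mapHeaders(true)
                    .urlDecodeHeaders(true)
                    .advanced()
                    .matchOnUriPrefix(false))
                .log("Got request for ${header.name}");
        }
    }

Note that the verb and path are passed together as the single path argument
("get:hello/{name}"), matching the spark-rest:verb:path syntax described in
the javadoc above.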