You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@camel.apache.org by ac...@apache.org on 2022/12/02 08:08:32 UTC

[camel-kamelets] branch main updated (03c2d24f -> b1dec7f3)

This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a change to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git


    from 03c2d24f Updated CHANGELOG.md
     new 4595e5f2 Introduce Kamelet input/output data types
     new f71ca6e8 Refine Kamelet data type solution with review comments
     new 4bdbc2d7 Fix Jitpack coordinates replacement and use KinD cluster v0.14.0
     new 96534b4f Add CloudEvent output type on AWS S3 Kamelet source
     new ed28941e Use log-sink Kamelet and show headers
     new 814654a8 Fail on missing data type and add log output
     new eb6b71ef Make sure data type resolver works on all runtimes
     new cf269604 Load S3 converters via annotation scan
     new baba9ef6 Preserve AWS S3 Key header as it is required during onCompletion
     new 30c5571f Remove AWS S3 Json output type
     new f142f382 Load AWS DDB converters via annotation scan
     new ca5892d1 Fix AWS DDB sink Kamelet
     new c230f2ad Enhance YAKS tests with AWS S3 data type test
     new 49cbe909 Fix cloud event type and do not set data content type
     new 96f2c0d9 Enhance data type AWS S3 YAKS tests
     new d62d2d0d Add option to disable data type registry classpath scan
     new 5595de75 Set proper media types
     new 6f206cab Fix rest-openapi-sink YAKS test
     new e3912879 Remove camel-cloudevents dependency
     new 606adff2 Move AWS S3 binary output type to generic level
     new d161ccfc Do cache ObjectMapper instance in JsonModelDatType converter
     new 429011c4 Enhance documentation on data type SPI
     new c64f31b0 Improve CloudEvents output produced by AWS S3 source
     new 00ccfe7e Simplify Json model data type
     new f6c24249 Fix Knative YAKS tests
     new a0406cc7 Revert existing Kamelets to not use data type converter
     new b45bb5dd Add experimental Kamelets using data type converter API
     new b1dec7f3 Include experimental Kamelets in the catalog

The 28 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../actions/install-knative/action.yml             |  23 +--
 .github/actions/install-knative/install-knative.sh | 142 ++++++++++++++
 .github/workflows/yaks-tests.yaml                  |  27 ++-
 .../aws-ddb-experimental-sink.kamelet.yaml         |  35 +++-
 ...aml => aws-s3-experimental-source.kamelet.yaml} |  29 ++-
 library/camel-kamelets-utils/pom.xml               |   7 +-
 .../utils/format/AnnotationDataTypeLoader.java     | 171 +++++++++++++++++
 .../kamelets/utils/format/DataTypeProcessor.java   | 110 +++++++++++
 .../utils/format/DefaultDataTypeConverter.java     |  85 +++++++++
 .../format/DefaultDataTypeConverterResolver.java   |  68 +++++++
 .../utils/format/DefaultDataTypeRegistry.java      | 208 +++++++++++++++++++++
 .../converter/aws2/ddb/Ddb2JsonInputType.java}     |  87 ++++++---
 .../aws2/s3/AWS2S3CloudEventOutputType.java        |  58 ++++++
 .../converter/standard/BinaryDataType.java}        |  27 +--
 .../converter/standard/JsonModelDataType.java      |  97 ++++++++++
 .../converter/standard/StringDataType.java}        |  25 ++-
 .../utils/format/spi/DataTypeConverter.java        |  75 ++++++++
 .../format/spi/DataTypeConverterResolver.java      |  53 ++++++
 .../kamelets/utils/format/spi/DataTypeLoader.java} |  14 +-
 .../utils/format/spi/DataTypeRegistry.java         |  68 +++++++
 .../utils/format/spi/annotations/DataType.java     |  56 ++++++
 .../transform/aws/ddb/JsonToDdbModelConverter.java |   2 +-
 .../services/org/apache/camel/DataTypeConverter    |   9 +-
 .../apache/camel/datatype/converter/aws2-ddb-json  |   9 +-
 .../camel/datatype/converter/aws2-s3-cloudevents   |   9 +-
 .../apache/camel/datatype/converter/camel-binary   |   9 +-
 .../camel/datatype/converter/camel-jsonObject      |   9 +-
 .../apache/camel/datatype/converter/camel-string   |   9 +-
 .../utils/format/DataTypeProcessorTest.java        |  99 ++++++++++
 .../DefaultDataTypeConverterResolverTest.java      |  76 ++++++++
 .../utils/format/DefaultDataTypeRegistryTest.java  |  60 ++++++
 .../converter/aws2/ddb/Ddb2JsonInputTypeTest.java} | 104 +++++++----
 .../aws2/s3/AWS2S3CloudEventOutputTypeTest.java    |  67 +++++++
 .../converter/standard/BinaryDataTypeTest.java     | 102 ++++++++++
 .../converter/standard/JsonModelDataTypeTest.java  | 102 ++++++++++
 .../converter/standard/StringDataTypeTest.java     |  90 +++++++++
 .../format/converter/test/UppercaseDataType.java}  |  20 +-
 .../aws/ddb/JsonToDdbModelConverterTest.java       |  17 +-
 .../services/org/apache/camel/DataTypeConverter    |   9 +-
 .../camel/datatype/converter/camel-jsonObject      |   9 +-
 .../camel/datatype/converter/camel-lowercase       |   9 +-
 .../org/apache/camel/datatype/converter/foo-json   |   9 +-
 .../src/test/resources/log4j2-test.xml             |  32 ++++
 ...yaml => aws-ddb-experimental-sink.kamelet.yaml} |  35 +++-
 .../aws-s3-experimental-source.kamelet.yaml        |  29 ++-
 test/aws-s3/README.md                              |  76 ++++++++
 .../amazonS3Client.groovy}                         |  27 +--
 test/aws-s3/aws-s3-credentials.properties          |   7 +
 test/aws-s3/aws-s3-knative-binding.feature         |  49 +++++
 test/aws-s3/aws-s3-source-property-conf.feature    |  37 ++++
 test/aws-s3/aws-s3-source-secret-conf.feature      |  39 ++++
 test/aws-s3/aws-s3-source-uri-conf.feature         |  32 ++++
 .../aws-s3-to-knative-channel.yaml}                |  32 ++--
 .../aws-s3-to-log-secret-based.groovy}             |   5 +-
 .../aws-s3-to-log-uri-based.groovy}                |  16 +-
 test/aws-s3/aws-s3-uri-binding.feature             |  35 ++++
 .../aws-s3-uri-binding.yaml}                       |  29 +--
 test/{aws-ddb-sink => aws-s3}/yaks-config.yaml     |  24 ++-
 .../aws-ddb-sink-exp}/amazonDDBClient.groovy       |   0
 .../aws-ddb-sink-exp}/aws-ddb-sink-binding.yaml    |   4 +-
 .../aws-ddb-sink-deleteItem.feature                |  12 +-
 .../aws-ddb-sink-exp}/aws-ddb-sink-putItem.feature |  12 +-
 .../aws-ddb-sink-updateItem.feature                |  12 +-
 .../aws-ddb-sink-exp}/putItem.groovy               |   0
 .../aws-ddb-sink-exp}/verifyItems.groovy           |   0
 .../aws-ddb-sink-exp}/yaks-config.yaml             |   0
 .../aws-s3-exp/amazonS3Client.groovy}              |  27 +--
 .../aws-s3-exp/aws-s3-cloudevents.feature          |  55 ++++++
 .../experimental/aws-s3-exp/aws-s3-knative.feature |  55 ++++++
 .../aws-s3-exp/aws-s3-to-knative.yaml}             |  33 ++--
 .../aws-s3-exp}/yaks-config.yaml                   |  19 +-
 test/rest-openapi-sink/rest-openapi-sink.feature   |   2 +-
 .../utils/knative-channel-to-log.yaml              |  21 +--
 .../utils/knative-to-log.yaml                      |  23 ++-
 74 files changed, 2706 insertions(+), 367 deletions(-)
 copy test/earthquake-source/yaks-config.yaml => .github/actions/install-knative/action.yml (76%)
 create mode 100755 .github/actions/install-knative/install-knative.sh
 copy library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml => kamelets/aws-ddb-experimental-sink.kamelet.yaml (86%)
 copy kamelets/{aws-s3-source.kamelet.yaml => aws-s3-experimental-source.kamelet.yaml} (87%)
 create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/AnnotationDataTypeLoader.java
 create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java
 create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java
 create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java
 create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
 copy library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/{transform/aws/ddb/JsonToDdbModelConverter.java => format/converter/aws2/ddb/Ddb2JsonInputType.java} (68%)
 create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
 copy library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/{serialization/InflightAvroSchemaResolver.java => format/converter/standard/BinaryDataType.java} (55%)
 create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java
 copy library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/{transform/kafka/ManualCommit.java => format/converter/standard/StringDataType.java} (56%)
 create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java
 create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverterResolver.java
 copy library/{camel-kamelets-catalog/src/main/java/org/apache/camel/kamelets/catalog/model/KameletLabelNames.java => camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeLoader.java} (65%)
 create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeRegistry.java
 create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java
 copy docs/source-watch.yml => library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter (77%)
 copy test/salesforce-sink/delete-secret.sh => library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-ddb-json (84%)
 copy test/salesforce-sink/delete-secret.sh => library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-cloudevents (84%)
 copy test/salesforce-sink/delete-secret.sh => library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-binary (84%)
 copy test/salesforce-sink/delete-secret.sh => library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-jsonObject (84%)
 copy test/salesforce-sink/delete-secret.sh => library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-string (84%)
 create mode 100644 library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DataTypeProcessorTest.java
 create mode 100644 library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolverTest.java
 create mode 100644 library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java
 copy library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/{transform/aws/ddb/JsonToDdbModelConverterTest.java => format/converter/aws2/ddb/Ddb2JsonInputTypeTest.java} (65%)
 create mode 100644 library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java
 create mode 100644 library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/BinaryDataTypeTest.java
 create mode 100644 library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java
 create mode 100644 library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/StringDataTypeTest.java
 copy library/{camel-kamelets-catalog/src/main/java/org/apache/camel/kamelets/catalog/model/KameletTypeEnum.java => camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/test/UppercaseDataType.java} (62%)
 copy test/salesforce-sink/delete-secret.sh => library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/DataTypeConverter (84%)
 copy test/salesforce-sink/delete-secret.sh => library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/camel-jsonObject (84%)
 copy test/salesforce-sink/delete-secret.sh => library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/camel-lowercase (84%)
 copy test/salesforce-sink/delete-secret.sh => library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/foo-json (83%)
 create mode 100644 library/camel-kamelets-utils/src/test/resources/log4j2-test.xml
 copy library/camel-kamelets/src/main/resources/kamelets/{aws-ddb-sink.kamelet.yaml => aws-ddb-experimental-sink.kamelet.yaml} (86%)
 copy kamelets/aws-s3-source.kamelet.yaml => library/camel-kamelets/src/main/resources/kamelets/aws-s3-experimental-source.kamelet.yaml (87%)
 create mode 100644 test/aws-s3/README.md
 copy test/{aws-ddb-sink/amazonDDBClient.groovy => aws-s3/amazonS3Client.groovy} (57%)
 create mode 100644 test/aws-s3/aws-s3-credentials.properties
 create mode 100644 test/aws-s3/aws-s3-knative-binding.feature
 create mode 100644 test/aws-s3/aws-s3-source-property-conf.feature
 create mode 100644 test/aws-s3/aws-s3-source-secret-conf.feature
 create mode 100644 test/aws-s3/aws-s3-source-uri-conf.feature
 copy test/{aws-ddb-sink/aws-ddb-sink-binding.yaml => aws-s3/aws-s3-to-knative-channel.yaml} (75%)
 copy test/{aws-ddb-sink/verifyItems.groovy => aws-s3/aws-s3-to-log-secret-based.groovy} (87%)
 copy test/{mail-sink/mail-server.groovy => aws-s3/aws-s3-to-log-uri-based.groovy} (59%)
 create mode 100644 test/aws-s3/aws-s3-uri-binding.feature
 copy test/{aws-ddb-sink/aws-ddb-sink-binding.yaml => aws-s3/aws-s3-uri-binding.yaml} (75%)
 copy test/{aws-ddb-sink => aws-s3}/yaks-config.yaml (74%)
 copy test/{aws-ddb-sink => experimental/aws-ddb-sink-exp}/amazonDDBClient.groovy (100%)
 copy test/{aws-ddb-sink => experimental/aws-ddb-sink-exp}/aws-ddb-sink-binding.yaml (95%)
 copy test/{aws-ddb-sink => experimental/aws-ddb-sink-exp}/aws-ddb-sink-deleteItem.feature (86%)
 copy test/{aws-ddb-sink => experimental/aws-ddb-sink-exp}/aws-ddb-sink-putItem.feature (86%)
 copy test/{aws-ddb-sink => experimental/aws-ddb-sink-exp}/aws-ddb-sink-updateItem.feature (88%)
 copy test/{aws-ddb-sink => experimental/aws-ddb-sink-exp}/putItem.groovy (100%)
 copy test/{aws-ddb-sink => experimental/aws-ddb-sink-exp}/verifyItems.groovy (100%)
 copy test/{aws-ddb-sink => experimental/aws-ddb-sink-exp}/yaks-config.yaml (100%)
 copy test/{aws-ddb-sink/amazonDDBClient.groovy => experimental/aws-s3-exp/amazonS3Client.groovy} (57%)
 create mode 100644 test/experimental/aws-s3-exp/aws-s3-cloudevents.feature
 create mode 100644 test/experimental/aws-s3-exp/aws-s3-knative.feature
 copy test/{aws-ddb-sink/aws-ddb-sink-binding.yaml => experimental/aws-s3-exp/aws-s3-to-knative.yaml} (75%)
 copy test/{aws-ddb-sink => experimental/aws-s3-exp}/yaks-config.yaml (81%)
 copy templates/bindings/camel-k/ceph-source-binding.yaml => test/utils/knative-channel-to-log.yaml (79%)
 copy templates/bindings/camel-k/ceph-source-binding.yaml => test/utils/knative-to-log.yaml (79%)


[camel-kamelets] 04/28: Add CloudEvent output type on AWS S3 Kamelet source

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit 96534b4fded6d900a93abbd7acaf8cd5c0c99618
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Thu Nov 17 17:09:49 2022 +0100

    Add CloudEvent output type on AWS S3 Kamelet source
---
 kamelets/aws-ddb-sink.kamelet.yaml                 |  2 +-
 kamelets/aws-s3-source.kamelet.yaml                |  2 +-
 library/camel-kamelets-utils/pom.xml               |  5 ++
 .../kamelets/utils/format/DataTypeProcessor.java   |  6 ++
 .../aws2/s3/AWS2S3CloudEventOutputType.java        | 62 ++++++++++++++
 .../converter/standard/JsonModelDataType.java      |  6 +-
 .../camel/datatype/converter/aws2-s3-cloudevents   | 18 ++++
 .../utils/format/DataTypeProcessorTest.java        | 98 ++++++++++++++++++++++
 .../DefaultDataTypeConverterResolverTest.java      |  3 +
 .../utils/format/DefaultDataTypeRegistryTest.java  |  6 ++
 .../s3/AWS2S3CloudEventOutputTypeTest.java}        | 58 +++++--------
 .../converter/standard/JsonModelDataTypeTest.java  |  2 +-
 .../format/converter/test/UppercaseDataType.java   | 31 +++++++
 .../services/org/apache/camel/DataTypeConverter    | 18 ++++
 .../camel/datatype/converter/camel-lowercase       | 18 ++++
 .../resources/kamelets/aws-ddb-sink.kamelet.yaml   |  2 +-
 .../resources/kamelets/aws-s3-source.kamelet.yaml  |  2 +-
 test/aws-s3/aws-s3-uri-binding.yaml                |  2 +-
 18 files changed, 295 insertions(+), 46 deletions(-)

diff --git a/kamelets/aws-ddb-sink.kamelet.yaml b/kamelets/aws-ddb-sink.kamelet.yaml
index a4e7a114..952ecfa1 100644
--- a/kamelets/aws-ddb-sink.kamelet.yaml
+++ b/kamelets/aws-ddb-sink.kamelet.yaml
@@ -124,7 +124,7 @@ spec:
         - key: format
           value: '{{inputFormat}}'
         - key: registry
-          value: '{{dataTypeRegistry}}'
+          value: '#bean:{{dataTypeRegistry}}'
     from:
       uri: "kamelet:source"
       steps:
diff --git a/kamelets/aws-s3-source.kamelet.yaml b/kamelets/aws-s3-source.kamelet.yaml
index a63af7dc..d937f6e5 100644
--- a/kamelets/aws-s3-source.kamelet.yaml
+++ b/kamelets/aws-s3-source.kamelet.yaml
@@ -130,7 +130,7 @@ spec:
           - key: format
             value: '{{outputFormat}}'
           - key: registry
-            value: '{{dataTypeRegistry}}'
+            value: '#bean:{{dataTypeRegistry}}'
       - name: renameHeaders
         type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders"
         property:
diff --git a/library/camel-kamelets-utils/pom.xml b/library/camel-kamelets-utils/pom.xml
index 5b1441f3..2aba210d 100644
--- a/library/camel-kamelets-utils/pom.xml
+++ b/library/camel-kamelets-utils/pom.xml
@@ -82,6 +82,11 @@
             <artifactId>camel-aws2-s3</artifactId>
             <scope>provided</scope>
         </dependency>
+        <dependency>
+            <groupId>org.apache.camel</groupId>
+            <artifactId>camel-cloudevents</artifactId>
+            <!--<scope>provided</scope>-->
+        </dependency>
 
         <!-- Test scoped dependencies -->
         <dependency>
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java
index 81c58330..def0f2b8 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java
@@ -31,6 +31,8 @@ import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
  */
 public class DataTypeProcessor implements Processor, CamelContextAware {
 
+    public static final String DATA_TYPE_FORMAT_PROPERTY = "CamelDataTypeFormat";
+
     private CamelContext camelContext;
 
     private DefaultDataTypeRegistry registry;
@@ -42,6 +44,10 @@ public class DataTypeProcessor implements Processor, CamelContextAware {
 
     @Override
     public void process(Exchange exchange) throws Exception {
+        if (exchange.hasProperties() && exchange.getProperties().containsKey(DATA_TYPE_FORMAT_PROPERTY)) {
+            format = exchange.getProperty(DATA_TYPE_FORMAT_PROPERTY, String.class);
+        }
+
         if (format == null || format.isEmpty()) {
             return;
         }
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
new file mode 100644
index 00000000..655a4cef
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format.converter.aws2.s3;
+
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.Map;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.component.aws2.s3.AWS2S3Constants;
+import org.apache.camel.component.cloudevents.CloudEvent;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
+
+/**
+ * Output data type represents AWS S3 get object response as CloudEvent V1. The data type sets Camel specific
+ * CloudEvent headers on the exchange.
+ */
+@DataType(scheme = "aws2-s3", name = "cloudevents")
+public class AWS2S3CloudEventOutputType implements DataTypeConverter {
+
+    @Override
+    public void convert(Exchange exchange) {
+        final Map<String, Object> headers = exchange.getMessage().getHeaders();
+
+        headers.put(CloudEvent.CAMEL_CLOUD_EVENT_TYPE, "kamelet:aws-s3-source");
+        headers.put(CloudEvent.CAMEL_CLOUD_EVENT_SOURCE, exchange.getMessage().getHeader(AWS2S3Constants.BUCKET_NAME, String.class));
+        headers.put(CloudEvent.CAMEL_CLOUD_EVENT_SUBJECT, exchange.getMessage().getHeader(AWS2S3Constants.KEY, String.class));
+        headers.put(CloudEvent.CAMEL_CLOUD_EVENT_TIME, getEventTime(exchange));
+        headers.put(CloudEvent.CAMEL_CLOUD_EVENT_DATA_CONTENT_TYPE, exchange.getMessage().getHeader(AWS2S3Constants.CONTENT_TYPE, String.class));
+
+        String encoding = exchange.getMessage().getHeader(AWS2S3Constants.CONTENT_ENCODING, String.class);
+        if (encoding != null) {
+            headers.put(CloudEvent.CAMEL_CLOUD_EVENT_DATA_CONTENT_ENCODING, encoding);
+        }
+
+        exchange.getMessage().removeHeaders("CamelAwsS3*");
+    }
+
+    private String getEventTime(Exchange exchange) {
+        final ZonedDateTime created
+                = ZonedDateTime.ofInstant(Instant.ofEpochMilli(exchange.getCreated()), ZoneId.systemDefault());
+        return DateTimeFormatter.ISO_INSTANT.format(created);
+    }
+}
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java
index 047e6dd5..d8d4ca4e 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java
@@ -36,15 +36,15 @@ import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
 @DataType(name = "jsonObject")
 public class JsonModelDataType implements DataTypeConverter {
 
-    public static final String JSON_DATA_TYPE_KEY = "CamelJsonModelDataType";
+    public static final String DATA_TYPE_MODEL_PROPERTY = "CamelDataTypeModel";
 
     @Override
     public void convert(Exchange exchange) {
-        if (!exchange.hasProperties() || !exchange.getProperties().containsKey(JSON_DATA_TYPE_KEY)) {
+        if (!exchange.hasProperties() || !exchange.getProperties().containsKey(DATA_TYPE_MODEL_PROPERTY)) {
             return;
         }
 
-        String type = exchange.getProperty(JSON_DATA_TYPE_KEY, String.class);
+        String type = exchange.getProperty(DATA_TYPE_MODEL_PROPERTY, String.class);
         try (JacksonDataFormat dataFormat = new JacksonDataFormat(new ObjectMapper(), Class.forName(type))) {
             Object unmarshalled = dataFormat.unmarshal(exchange, getBodyAsStream(exchange));
             exchange.getMessage().setBody(unmarshalled);
diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-cloudevents b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-cloudevents
new file mode 100644
index 00000000..fafdd926
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-cloudevents
@@ -0,0 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+class=org.apache.camel.kamelets.utils.format.converter.aws2.s3.AWS2S3CloudEventOutputType
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DataTypeProcessorTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DataTypeProcessorTest.java
new file mode 100644
index 00000000..0140b6f9
--- /dev/null
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DataTypeProcessorTest.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format;
+
+import java.io.ByteArrayInputStream;
+import java.nio.charset.StandardCharsets;
+
+import org.apache.camel.CamelContextAware;
+import org.apache.camel.Exchange;
+import org.apache.camel.impl.DefaultCamelContext;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.apache.camel.support.DefaultExchange;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+class DataTypeProcessorTest {
+
+    private final DefaultCamelContext camelContext = new DefaultCamelContext();
+
+    private final DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry();
+
+    private final DataTypeProcessor processor = new DataTypeProcessor();
+
+    @BeforeEach
+    void setup() {
+        CamelContextAware.trySetCamelContext(processor, camelContext);
+        CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext);
+        processor.setRegistry(dataTypeRegistry);
+    }
+
+    @Test
+    public void shouldApplyDataTypeConverterFromAnnotationLookup() throws Exception {
+        Exchange exchange = new DefaultExchange(camelContext);
+
+        exchange.getMessage().setBody(new ByteArrayInputStream("Test".getBytes(StandardCharsets.UTF_8)));
+        processor.setFormat("uppercase");
+        processor.process(exchange);
+
+        assertEquals(String.class, exchange.getMessage().getBody().getClass());
+        assertEquals("TEST", exchange.getMessage().getBody());
+    }
+
+    @Test
+    public void shouldApplyDataTypeConverterFromResourceLookup() throws Exception {
+        Exchange exchange = new DefaultExchange(camelContext);
+
+        exchange.getMessage().setBody(new ByteArrayInputStream("Test".getBytes(StandardCharsets.UTF_8)));
+        processor.setFormat("lowercase");
+        processor.process(exchange);
+
+        assertEquals(String.class, exchange.getMessage().getBody().getClass());
+        assertEquals("test", exchange.getMessage().getBody());
+    }
+
+    @Test
+    public void shouldHandleUnknownDataType() throws Exception {
+        Exchange exchange = new DefaultExchange(camelContext);
+
+        exchange.getMessage().setBody(new ByteArrayInputStream("Test".getBytes(StandardCharsets.UTF_8)));
+        processor.setScheme("foo");
+        processor.setFormat("unknown");
+        processor.process(exchange);
+
+        assertEquals(ByteArrayInputStream.class, exchange.getMessage().getBody().getClass());
+        assertEquals("Test", exchange.getMessage().getBody(String.class));
+    }
+
+    public static class LowercaseDataType implements DataTypeConverter {
+
+        @Override
+        public void convert(Exchange exchange) {
+            exchange.getMessage().setBody(exchange.getMessage().getBody(String.class).toLowerCase());
+        }
+
+        @Override
+        public String getName() {
+            return "lowercase";
+        }
+    }
+
+}
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolverTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolverTest.java
index 1972b047..b281f314 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolverTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolverTest.java
@@ -56,6 +56,9 @@ class DefaultDataTypeConverterResolverTest {
         converter = resolver.resolve("foo", "json", camelContext);
         Assertions.assertTrue(converter.isPresent());
         Assertions.assertEquals(FooConverter.class, converter.get().getClass());
+
+        converter = resolver.resolve("camel", "lowercase", camelContext);
+        Assertions.assertTrue(converter.isPresent());
     }
 
     public static class FooConverter implements DataTypeConverter {
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java
index e077b369..c72e7897 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java
@@ -22,6 +22,7 @@ import java.util.Optional;
 import org.apache.camel.CamelContextAware;
 import org.apache.camel.impl.DefaultCamelContext;
 import org.apache.camel.kamelets.utils.format.converter.standard.JsonModelDataType;
+import org.apache.camel.kamelets.utils.format.converter.test.UppercaseDataType;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
@@ -49,6 +50,11 @@ class DefaultDataTypeRegistryTest {
         Assertions.assertTrue(converter.isPresent());
         Assertions.assertEquals(DefaultDataTypeConverter.class, converter.get().getClass());
         Assertions.assertEquals(byte[].class, ((DefaultDataTypeConverter) converter.get()).getType());
+        converter = dataTypeRegistry.lookup( "lowercase");
+        Assertions.assertTrue(converter.isPresent());
+        converter = dataTypeRegistry.lookup( "uppercase");
+        Assertions.assertTrue(converter.isPresent());
+        Assertions.assertEquals(UppercaseDataType.class, converter.get().getClass());
     }
 
 }
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java
similarity index 55%
copy from library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java
copy to library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java
index c175cc6d..10c51708 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java
@@ -15,13 +15,16 @@
  * limitations under the License.
  */
 
-package org.apache.camel.kamelets.utils.format.converter.standard;
+package org.apache.camel.kamelets.utils.format.converter.aws2.s3;
 
+import java.io.ByteArrayInputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Optional;
 
-import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.camel.CamelContextAware;
 import org.apache.camel.Exchange;
+import org.apache.camel.component.aws2.s3.AWS2S3Constants;
+import org.apache.camel.component.cloudevents.CloudEvents;
 import org.apache.camel.impl.DefaultCamelContext;
 import org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
@@ -31,54 +34,35 @@ import org.junit.jupiter.api.Test;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
-public class JsonModelDataTypeTest {
+class AWS2S3CloudEventOutputTypeTest {
 
     private final DefaultCamelContext camelContext = new DefaultCamelContext();
 
-    private final JsonModelDataType dataType = new JsonModelDataType();
+    private final AWS2S3CloudEventOutputType outputType = new AWS2S3CloudEventOutputType();
 
     @Test
-    void shouldMapFromStringToJsonModel() throws Exception {
+    void shouldMapToCloudEvent() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);
 
-        exchange.setProperty(JsonModelDataType.JSON_DATA_TYPE_KEY, Person.class.getName());
-        exchange.getMessage().setBody("{ \"name\": \"Sheldon\", \"age\": 29}");
-        dataType.convert(exchange);
-
-        assertEquals(Person.class, exchange.getMessage().getBody().getClass());
-        assertEquals("Sheldon", exchange.getMessage().getBody(Person.class).getName());
+        exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test1.txt");
+        exchange.getMessage().setHeader(AWS2S3Constants.BUCKET_NAME, "myBucket");
+        exchange.getMessage().setHeader(AWS2S3Constants.CONTENT_TYPE, "text/plain");
+        exchange.getMessage().setHeader(AWS2S3Constants.CONTENT_ENCODING, StandardCharsets.UTF_8.toString());
+        exchange.getMessage().setBody(new ByteArrayInputStream("Test1".getBytes(StandardCharsets.UTF_8)));
+        outputType.convert(exchange);
+
+        Assertions.assertTrue(exchange.getMessage().hasHeaders());
+        Assertions.assertFalse(exchange.getMessage().getHeaders().containsKey(AWS2S3Constants.KEY));
+        assertEquals("kamelet:aws-s3-source", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_TYPE));
+        assertEquals("test1.txt", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_SUBJECT));
+        assertEquals("myBucket", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_SOURCE));
     }
 
     @Test
     public void shouldLookupDataType() throws Exception {
         DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry();
         CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext);
-        Optional<DataTypeConverter> converter = dataTypeRegistry.lookup("jsonObject");
+        Optional<DataTypeConverter> converter = dataTypeRegistry.lookup("aws2-s3", "cloudevents");
         Assertions.assertTrue(converter.isPresent());
     }
-
-    public static class Person {
-        @JsonProperty
-        private String name;
-
-        @JsonProperty
-        private Long age;
-
-        public String getName() {
-            return name;
-        }
-
-        public void setName(String name) {
-            this.name = name;
-        }
-
-        public Long getAge() {
-            return age;
-        }
-
-        public void setAge(Long age) {
-            this.age = age;
-        }
-    }
-
 }
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java
index c175cc6d..d93da234 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java
@@ -41,7 +41,7 @@ public class JsonModelDataTypeTest {
     void shouldMapFromStringToJsonModel() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);
 
-        exchange.setProperty(JsonModelDataType.JSON_DATA_TYPE_KEY, Person.class.getName());
+        exchange.setProperty(JsonModelDataType.DATA_TYPE_MODEL_PROPERTY, Person.class.getName());
         exchange.getMessage().setBody("{ \"name\": \"Sheldon\", \"age\": 29}");
         dataType.convert(exchange);
 
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/test/UppercaseDataType.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/test/UppercaseDataType.java
new file mode 100644
index 00000000..60604f73
--- /dev/null
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/test/UppercaseDataType.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format.converter.test;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
+
+@DataType(name = "uppercase")
+public class UppercaseDataType implements DataTypeConverter {
+
+    @Override
+    public void convert(Exchange exchange) {
+        exchange.getMessage().setBody(exchange.getMessage().getBody(String.class).toUpperCase());
+    }
+}
diff --git a/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/DataTypeConverter b/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/DataTypeConverter
new file mode 100644
index 00000000..bf3aaf0d
--- /dev/null
+++ b/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/DataTypeConverter
@@ -0,0 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+org.apache.camel.kamelets.utils.format.converter.test
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/camel-lowercase b/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/camel-lowercase
new file mode 100644
index 00000000..b140a56b
--- /dev/null
+++ b/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/camel-lowercase
@@ -0,0 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+class=org.apache.camel.kamelets.utils.format.DataTypeProcessorTest$LowercaseDataType
\ No newline at end of file
diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml
index a4e7a114..952ecfa1 100644
--- a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml
+++ b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml
@@ -124,7 +124,7 @@ spec:
         - key: format
           value: '{{inputFormat}}'
         - key: registry
-          value: '{{dataTypeRegistry}}'
+          value: '#bean:{{dataTypeRegistry}}'
     from:
       uri: "kamelet:source"
       steps:
diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml
index a63af7dc..d937f6e5 100644
--- a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml
+++ b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml
@@ -130,7 +130,7 @@ spec:
           - key: format
             value: '{{outputFormat}}'
           - key: registry
-            value: '{{dataTypeRegistry}}'
+            value: '#bean:{{dataTypeRegistry}}'
       - name: renameHeaders
         type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders"
         property:
diff --git a/test/aws-s3/aws-s3-uri-binding.yaml b/test/aws-s3/aws-s3-uri-binding.yaml
index 50522818..14d420f9 100644
--- a/test/aws-s3/aws-s3-uri-binding.yaml
+++ b/test/aws-s3/aws-s3-uri-binding.yaml
@@ -28,7 +28,7 @@ spec:
     properties:
       bucketNameOrArn: ${aws.s3.bucketNameOrArn}
       overrideEndpoint: true
-      outputFormat: json
+      outputFormat: cloudevents
       uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL}
       accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}
       secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}


[camel-kamelets] 13/28: Enhance YAKS tests with AWS S3 data type test

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit c230f2ad90f180daa3128ad8aa5ac715f36e0e4b
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Mon Nov 21 16:33:00 2022 +0100

    Enhance YAKS tests with AWS S3 data type test
---
 test/aws-s3/README.md                              |  2 +-
 test/aws-s3/aws-s3-data-type.feature               | 48 ++++++++++++++++++++++
 ...ding.feature => aws-s3-knative-binding.feature} | 28 ++++++-------
 ...aws-s3-to-inmem.yaml => aws-s3-to-knative.yaml} | 16 ++++++--
 test/aws-s3/aws-s3-uri-binding.yaml                |  1 -
 test/aws-s3/yaks-config.yaml                       |  9 +++-
 .../{inmem-to-log.yaml => knative-to-log.yaml}     | 17 +++++---
 7 files changed, 93 insertions(+), 28 deletions(-)

diff --git a/test/aws-s3/README.md b/test/aws-s3/README.md
index 6e7d7315..e71f403f 100644
--- a/test/aws-s3/README.md
+++ b/test/aws-s3/README.md
@@ -70,7 +70,7 @@ $ yaks test aws-s3-uri-binding.feature
 To run tests with binding to Knative channel:
 
 ```shell script
-$ yaks test aws-s3-inmem-binding.feature
+$ yaks test aws-s3-knative-binding.feature
 ```
 
 You will be provided with the test log output and the test results.
diff --git a/test/aws-s3/aws-s3-data-type.feature b/test/aws-s3/aws-s3-data-type.feature
new file mode 100644
index 00000000..3ec04bde
--- /dev/null
+++ b/test/aws-s3/aws-s3-data-type.feature
@@ -0,0 +1,48 @@
+Feature: AWS S3 Kamelet - output data type
+
+  Background:
+    Given Knative event consumer timeout is 20000 ms
+    Given Camel K resource polling configuration
+      | maxAttempts          | 200   |
+      | delayBetweenAttempts | 4000  |
+    Given variables
+      | aws.s3.output | cloudevents |
+      | aws.s3.bucketNameOrArn | mybucket |
+      | aws.s3.message | Hello from S3 Kamelet |
+      | aws.s3.key | hello.txt |
+
+  Scenario: Start LocalStack container
+    Given Enable service S3
+    Given start LocalStack container
+    And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}'
+
+  Scenario: Create AWS-S3 client
+    Given New global Camel context
+    Given load to Camel registry amazonS3Client.groovy
+
+  Scenario: Create AWS-S3 Kamelet to Knative binding
+    Given variable loginfo is "Installed features"
+    When load KameletBinding aws-s3-to-knative.yaml
+    And KameletBinding aws-s3-to-knative is available
+    And Camel K integration aws-s3-to-knative is running
+    Then Camel K integration aws-s3-to-knative should print ${loginfo}
+
+  Scenario: Verify Kamelet source
+    Given create Knative event consumer service event-consumer-service
+    Given create Knative trigger event-service-trigger on service event-consumer-service with filter on attributes
+      | type   | org.apache.camel.event |
+    Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}"
+    Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message}
+    Then expect Knative event data: ${aws.s3.message}
+    And verify Knative event
+      | type            | org.apache.camel.event |
+      | source          | @ignore@ |
+      | subject         | @ignore@ |
+      | id              | @ignore@ |
+
+  Scenario: Remove Camel K resources
+    Given delete KameletBinding aws-s3-to-knative
+    Given delete Kubernetes service event-consumer-service
+
+  Scenario: Stop container
+    Given stop LocalStack container
diff --git a/test/aws-s3/aws-s3-inmem-binding.feature b/test/aws-s3/aws-s3-knative-binding.feature
similarity index 58%
rename from test/aws-s3/aws-s3-inmem-binding.feature
rename to test/aws-s3/aws-s3-knative-binding.feature
index d67e7798..c143bbee 100644
--- a/test/aws-s3/aws-s3-inmem-binding.feature
+++ b/test/aws-s3/aws-s3-knative-binding.feature
@@ -1,5 +1,5 @@
 @knative
-Feature: AWS S3 Kamelet - binding to InMemoryChannel
+Feature: AWS S3 Kamelet - binding to Knative
 
   Background:
     Given Kamelet aws-s3-source is available
@@ -17,33 +17,31 @@ Feature: AWS S3 Kamelet - binding to InMemoryChannel
     Given New global Camel context
     Given load to Camel registry amazonS3Client.groovy
 
-  Scenario: Create Knative broker and channel
+  Scenario: Create Knative broker
     Given create Knative broker default
     And Knative broker default is running
-    Given create Knative channel messages
 
   Scenario: Create AWS-S3 Kamelet to InMemoryChannel binding
     Given variable loginfo is "Installed features"
-    Given load KameletBinding aws-s3-to-inmem.yaml
-    Given load KameletBinding inmem-to-log.yaml
-    Then KameletBinding aws-s3-to-inmem should be available
-    And KameletBinding inmem-to-log should be available
-    And Camel K integration aws-s3-to-inmem is running
-    And Camel K integration inmem-to-log is running
-    And Camel K integration aws-s3-to-inmem should print ${loginfo}
-    And Camel K integration inmem-to-log should print ${loginfo}
+    Given load KameletBinding aws-s3-to-knative.yaml
+    Given load KameletBinding knative-to-log.yaml
+    Then KameletBinding aws-s3-to-knative should be available
+    And KameletBinding knative-to-log should be available
+    And Camel K integration aws-s3-to-knative is running
+    And Camel K integration knative-to-log is running
+    And Camel K integration aws-s3-to-knative should print ${loginfo}
+    And Camel K integration knative-to-log should print ${loginfo}
     Then sleep 10000 ms
 
   Scenario: Verify Kamelet source
     Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}"
     Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message}
-    Then Camel K integration inmem-to-log should print ${aws.s3.message}
+    Then Camel K integration knative-to-log should print ${aws.s3.message}
 
   Scenario: Remove resources
-    Given delete KameletBinding aws-s3-to-inmem
-    Given delete KameletBinding inmem-to-log
+    Given delete KameletBinding aws-s3-to-knative
+    Given delete KameletBinding knative-to-log
     Given delete Knative broker default
-    Given delete Knative channel messages
 
   Scenario: Stop container
     Given stop LocalStack container
diff --git a/test/aws-s3/aws-s3-to-inmem.yaml b/test/aws-s3/aws-s3-to-knative.yaml
similarity index 83%
rename from test/aws-s3/aws-s3-to-inmem.yaml
rename to test/aws-s3/aws-s3-to-knative.yaml
index ce880028..e99ee20f 100644
--- a/test/aws-s3/aws-s3-to-inmem.yaml
+++ b/test/aws-s3/aws-s3-to-knative.yaml
@@ -18,7 +18,7 @@
 apiVersion: camel.apache.org/v1alpha1
 kind: KameletBinding
 metadata:
-  name: aws-s3-to-inmem
+  name: aws-s3-to-knative
 spec:
   source:
     ref:
@@ -28,12 +28,20 @@ spec:
     properties:
       bucketNameOrArn: ${aws.s3.bucketNameOrArn}
       overrideEndpoint: true
+      outputFormat: ${aws.s3.output}
       uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL}
       accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}
       secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}
       region: ${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}
+  steps:
+    - ref:
+        kind: Kamelet
+        apiVersion: camel.apache.org/v1alpha1
+        name: log-sink
+      properties:
+        showHeaders: true
   sink:
     ref:
-      kind: InMemoryChannel
-      apiVersion: messaging.knative.dev/v1
-      name: messages
+      kind: Broker
+      apiVersion: eventing.knative.dev/v1
+      name: default
diff --git a/test/aws-s3/aws-s3-uri-binding.yaml b/test/aws-s3/aws-s3-uri-binding.yaml
index e21d54f4..b3612219 100644
--- a/test/aws-s3/aws-s3-uri-binding.yaml
+++ b/test/aws-s3/aws-s3-uri-binding.yaml
@@ -28,7 +28,6 @@ spec:
     properties:
       bucketNameOrArn: ${aws.s3.bucketNameOrArn}
       overrideEndpoint: true
-      outputFormat: cloudevents
       uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL}
       accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}
       secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}
diff --git a/test/aws-s3/yaks-config.yaml b/test/aws-s3/yaks-config.yaml
index f36d136c..0d70ba75 100644
--- a/test/aws-s3/yaks-config.yaml
+++ b/test/aws-s3/yaks-config.yaml
@@ -42,12 +42,17 @@ config:
       - aws-s3-to-log-uri-based.groovy
       - aws-s3-to-log-secret-based.groovy
       - aws-s3-uri-binding.yaml
-      - aws-s3-to-inmem.yaml
-      - ../utils/inmem-to-log.yaml
+      - aws-s3-to-knative.yaml
+      - ../utils/knative-to-log.yaml
     cucumber:
       tags:
         - "not @ignored"
     settings:
+      loggers:
+        - name: Logger.Message_IN
+          level: DEBUG
+        - name: Logger.Message_OUT
+          level: DEBUG
       dependencies:
         - groupId: com.amazonaws
           artifactId: aws-java-sdk-kinesis
diff --git a/test/utils/inmem-to-log.yaml b/test/utils/knative-to-log.yaml
similarity index 78%
rename from test/utils/inmem-to-log.yaml
rename to test/utils/knative-to-log.yaml
index 8b5dc51e..c03e6de2 100644
--- a/test/utils/inmem-to-log.yaml
+++ b/test/utils/knative-to-log.yaml
@@ -18,12 +18,19 @@
 apiVersion: camel.apache.org/v1alpha1
 kind: KameletBinding
 metadata:
-  name: inmem-to-log
+  name: knative-to-log
 spec:
   source:
     ref:
-      kind: InMemoryChannel
-      apiVersion: messaging.knative.dev/v1
-      name: messages
+      kind: Broker
+      apiVersion: eventing.knative.dev/v1
+      name: default
+    properties:
+      type: org.apache.camel.event
   sink:
-    uri: log:info
+    ref:
+      kind: Kamelet
+      apiVersion: camel.apache.org/v1alpha1
+      name: log-sink
+    properties:
+      showHeaders: true


[camel-kamelets] 24/28: Simplify Json model data type

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit 00ccfe7e4fb16dfe8aa692d9579bbec2e72ca310
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Wed Nov 30 15:56:53 2022 +0100

    Simplify Json model data type
    
    - Remove JacksonDataFormat in favor of using simple ObjectMapper instance
    - Reuse ObjectMapper instance for all exchanges processed by the data type
---
 .../converter/standard/JsonModelDataType.java      | 22 +++++++++++++++++-----
 .../converter/standard/JsonModelDataTypeTest.java  | 14 +++++++++++++-
 2 files changed, 30 insertions(+), 6 deletions(-)

diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java
index 0a80ee32..183f1112 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java
@@ -26,7 +26,6 @@ import org.apache.camel.CamelContextAware;
 import org.apache.camel.CamelExecutionException;
 import org.apache.camel.Exchange;
 import org.apache.camel.InvalidPayloadException;
-import org.apache.camel.component.jackson.JacksonDataFormat;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
 import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
 import org.apache.camel.util.ObjectHelper;
@@ -41,21 +40,30 @@ public class JsonModelDataType implements DataTypeConverter, CamelContextAware {
 
     public static final String DATA_TYPE_MODEL_PROPERTY = "CamelDataTypeModel";
 
+    private String model;
+
     private CamelContext camelContext;
 
     private static final ObjectMapper mapper = new ObjectMapper();
 
     @Override
     public void convert(Exchange exchange) {
-        if (!exchange.hasProperties() || !exchange.getProperties().containsKey(DATA_TYPE_MODEL_PROPERTY)) {
+        String type;
+        if (exchange.hasProperties() && exchange.getProperties().containsKey(DATA_TYPE_MODEL_PROPERTY)) {
+            type = exchange.getProperty(DATA_TYPE_MODEL_PROPERTY, String.class);
+        } else {
+            type = model;
+        }
+
+        if (type == null) {
+            // neither model property nor exchange property defines proper type - do nothing
             return;
         }
 
         ObjectHelper.notNull(camelContext, "camelContext");
 
-        String type = exchange.getProperty(DATA_TYPE_MODEL_PROPERTY, String.class);
-        try (JacksonDataFormat dataFormat = new JacksonDataFormat(mapper, camelContext.getClassResolver().resolveMandatoryClass(type))) {
-            Object unmarshalled = dataFormat.unmarshal(exchange, getBodyAsStream(exchange));
+        try {
+            Object unmarshalled = mapper.reader().forType(camelContext.getClassResolver().resolveMandatoryClass(type)).readValue(getBodyAsStream(exchange));
             exchange.getMessage().setBody(unmarshalled);
         } catch (Exception e) {
             throw new CamelExecutionException(
@@ -78,6 +86,10 @@ public class JsonModelDataType implements DataTypeConverter, CamelContextAware {
         return camelContext;
     }
 
+    public void setModel(String model) {
+        this.model = model;
+    }
+
     @Override
     public void setCamelContext(CamelContext camelContext) {
         this.camelContext = camelContext;
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java
index cb253a16..7785017c 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java
@@ -44,7 +44,19 @@ public class JsonModelDataTypeTest {
     }
 
     @Test
-    void shouldMapFromStringToJsonModel() throws Exception {
+    void shouldMapStringToJsonModelWithModelProperty() throws Exception {
+        Exchange exchange = new DefaultExchange(camelContext);
+
+        exchange.getMessage().setBody("{ \"name\": \"Rajesh\", \"age\": 28}");
+        dataType.setModel(Person.class.getName());
+        dataType.convert(exchange);
+
+        assertEquals(Person.class, exchange.getMessage().getBody().getClass());
+        assertEquals("Rajesh", exchange.getMessage().getBody(Person.class).getName());
+    }
+
+    @Test
+    void shouldMapStringToJsonModelWithExchangeProperty() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);
 
         exchange.setProperty(JsonModelDataType.DATA_TYPE_MODEL_PROPERTY, Person.class.getName());


[camel-kamelets] 07/28: Make sure data type resolver works on all runtimes

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit eb6b71ef3fceb00d3d13838703047e43e25b4b18
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Fri Nov 18 15:17:34 2022 +0100

    Make sure data type resolver works on all runtimes
---
 .../format/DefaultDataTypeConverterResolver.java   | 42 ++++++----------------
 .../utils/format/DefaultDataTypeRegistry.java      |  7 ++--
 2 files changed, 15 insertions(+), 34 deletions(-)

diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java
index 5fdaa790..9d5e8b23 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java
@@ -23,7 +23,7 @@ import org.apache.camel.CamelContext;
 import org.apache.camel.ExtendedCamelContext;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverterResolver;
-import org.apache.camel.spi.FactoryFinder;
+import org.apache.camel.util.ObjectHelper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -34,51 +34,31 @@ import org.slf4j.LoggerFactory;
  */
 public class DefaultDataTypeConverterResolver implements DataTypeConverterResolver {
 
-    public static final String RESOURCE_PATH = "META-INF/services/org/apache/camel/datatype/converter/";
+    public static final String DATA_TYPE_CONVERTER_RESOURCE_PATH = "META-INF/services/org/apache/camel/datatype/converter/";
 
     private static final Logger LOG = LoggerFactory.getLogger(DefaultDataTypeConverterResolver.class);
 
-    private FactoryFinder factoryFinder;
-
     @Override
     public Optional<DataTypeConverter> resolve(String scheme, String name, CamelContext context) {
         String converterName = String.format("%s-%s", scheme, name);
 
         if (getLog().isDebugEnabled()) {
-            getLog().debug("Resolving data type converter {} via: {}{}", converterName, RESOURCE_PATH, converterName);
-        }
-
-        Class<?> type = findConverter(converterName, context);
-        if (type == null) {
-            // not found
-            return Optional.empty();
+            getLog().debug("Resolving data type converter {} via: {}{}", converterName, DATA_TYPE_CONVERTER_RESOURCE_PATH, converterName);
         }
 
-        if (getLog().isDebugEnabled()) {
+        Optional<DataTypeConverter> converter = findConverter(converterName, context);
+        if (getLog().isDebugEnabled() && converter.isPresent()) {
             getLog().debug("Found data type converter: {} via type: {} via: {}{}", converterName,
-                    type.getName(), factoryFinder.getResourcePath(), converterName);
+                    ObjectHelper.name(converter.getClass()), DATA_TYPE_CONVERTER_RESOURCE_PATH, converterName);
         }
 
-        // create the converter instance
-        if (DataTypeConverter.class.isAssignableFrom(type)) {
-            try {
-                return Optional.of((DataTypeConverter) context.getInjector().newInstance(type));
-            } catch (NoClassDefFoundError e) {
-                LOG.debug("Ignoring converter type: {} as a dependent class could not be found: {}",
-                        type.getCanonicalName(), e, e);
-            }
-        } else {
-            throw new IllegalArgumentException("Type is not a DataTypeConverter implementation. Found: " + type.getName());
-        }
-
-        return Optional.empty();
+        return converter;
     }
 
-    private Class<?> findConverter(String name, CamelContext context) {
-        if (factoryFinder == null) {
-            factoryFinder = context.adapt(ExtendedCamelContext.class).getFactoryFinder(RESOURCE_PATH);
-        }
-        return factoryFinder.findClass(name).orElse(null);
+    private Optional<DataTypeConverter> findConverter(String name, CamelContext context) {
+        return context.adapt(ExtendedCamelContext.class)
+                .getBootstrapFactoryFinder(DATA_TYPE_CONVERTER_RESOURCE_PATH)
+                .newInstance(name, DataTypeConverter.class);
     }
 
     protected Logger getLog() {
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
index 0e262d28..18effcc9 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
@@ -30,6 +30,7 @@ import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverterResolver;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeLoader;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeRegistry;
+import org.apache.camel.support.CamelContextHelper;
 import org.apache.camel.support.service.ServiceSupport;
 import org.apache.camel.util.ObjectHelper;
 import org.slf4j.Logger;
@@ -104,7 +105,7 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
             loader.load(this);
         }
 
-        LOG.debug("Loaded {} initial data type converters", dataTypeConverters.size());
+        LOG.debug("Loaded {} schemes holding {} data type converters", dataTypeConverters.size(), dataTypeConverters.values().stream().mapToInt(List::size).sum());
     }
 
     @Override
@@ -128,8 +129,8 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
         }
 
         // Looking for matching beans in Camel registry first
-        Optional<DataTypeConverter> dataTypeConverter = Optional.ofNullable(camelContext.getRegistry()
-                .lookupByNameAndType(String.format("%s-%s", scheme, name), DataTypeConverter.class));
+        Optional<DataTypeConverter> dataTypeConverter = Optional.ofNullable(CamelContextHelper.lookup(getCamelContext(),
+                String.format("%s-%s", scheme, name), DataTypeConverter.class));
 
         if (dataTypeConverter.isPresent()) {
             if (LOG.isDebugEnabled()) {


[camel-kamelets] 15/28: Enhance data type AWS S3 YAKS tests

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit 96f2c0d961a23dceacbf104fd666d0f41f5b1df9
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Tue Nov 22 12:35:45 2022 +0100

    Enhance data type AWS S3 YAKS tests
---
 .../{aws-s3-data-type.feature => aws-s3-cloudevents.feature}   | 10 +++++-----
 .../{aws-s3-data-type.feature => aws-s3-knative.feature}       |  5 ++---
 2 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/test/aws-s3/aws-s3-data-type.feature b/test/aws-s3/aws-s3-cloudevents.feature
similarity index 88%
copy from test/aws-s3/aws-s3-data-type.feature
copy to test/aws-s3/aws-s3-cloudevents.feature
index 3ec04bde..1e2f7d1e 100644
--- a/test/aws-s3/aws-s3-data-type.feature
+++ b/test/aws-s3/aws-s3-cloudevents.feature
@@ -1,4 +1,4 @@
-Feature: AWS S3 Kamelet - output data type
+Feature: AWS S3 Kamelet - cloud events data type
 
   Background:
     Given Knative event consumer timeout is 20000 ms
@@ -30,14 +30,14 @@ Feature: AWS S3 Kamelet - output data type
   Scenario: Verify Kamelet source
     Given create Knative event consumer service event-consumer-service
     Given create Knative trigger event-service-trigger on service event-consumer-service with filter on attributes
-      | type   | org.apache.camel.event |
+      | type   | kamelet.aws.s3.source |
     Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}"
     Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message}
     Then expect Knative event data: ${aws.s3.message}
     And verify Knative event
-      | type            | org.apache.camel.event |
-      | source          | @ignore@ |
-      | subject         | @ignore@ |
+      | type            | kamelet.aws.s3.source |
+      | source          | ${aws.s3.bucketNameOrArn} |
+      | subject         | ${aws.s3.key} |
       | id              | @ignore@ |
 
   Scenario: Remove Camel K resources
diff --git a/test/aws-s3/aws-s3-data-type.feature b/test/aws-s3/aws-s3-knative.feature
similarity index 94%
rename from test/aws-s3/aws-s3-data-type.feature
rename to test/aws-s3/aws-s3-knative.feature
index 3ec04bde..148ec1d6 100644
--- a/test/aws-s3/aws-s3-data-type.feature
+++ b/test/aws-s3/aws-s3-knative.feature
@@ -1,4 +1,4 @@
-Feature: AWS S3 Kamelet - output data type
+Feature: AWS S3 Kamelet - Knative binding
 
   Background:
     Given Knative event consumer timeout is 20000 ms
@@ -6,7 +6,7 @@ Feature: AWS S3 Kamelet - output data type
       | maxAttempts          | 200   |
       | delayBetweenAttempts | 4000  |
     Given variables
-      | aws.s3.output | cloudevents |
+      | aws.s3.output | string |
       | aws.s3.bucketNameOrArn | mybucket |
       | aws.s3.message | Hello from S3 Kamelet |
       | aws.s3.key | hello.txt |
@@ -37,7 +37,6 @@ Feature: AWS S3 Kamelet - output data type
     And verify Knative event
       | type            | org.apache.camel.event |
       | source          | @ignore@ |
-      | subject         | @ignore@ |
       | id              | @ignore@ |
 
   Scenario: Remove Camel K resources


[camel-kamelets] 16/28: Add option to disable data type registry classpath scan

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit d62d2d0dd42b1e688ca3ea3eedeb876ab3cc9e7b
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Wed Nov 23 21:56:04 2022 +0100

    Add option to disable data type registry classpath scan
---
 .../camel/kamelets/utils/format/DefaultDataTypeRegistry.java   | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
index 18effcc9..d393e6c7 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
@@ -53,6 +53,8 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
 
     private DataTypeConverterResolver dataTypeConverterResolver;
 
+    private boolean classpathScan = true;
+
     private final Map<String, List<DataTypeConverter>> dataTypeConverters = new HashMap<>();
 
     @Override
@@ -95,7 +97,9 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
     protected void doInit() throws Exception {
         super.doInit();
 
-        dataTypeLoaders.add(new AnnotationDataTypeLoader());
+        if (classpathScan) {
+            dataTypeLoaders.add(new AnnotationDataTypeLoader());
+        }
 
         addDataTypeConverter(new DefaultDataTypeConverter("string", String.class));
         addDataTypeConverter(new DefaultDataTypeConverter("binary", byte[].class));
@@ -171,6 +175,10 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
         return dataTypeConverters.computeIfAbsent(scheme, (s) -> new ArrayList<>());
     }
 
+    public void setClasspathScan(boolean classpathScan) {
+        this.classpathScan = classpathScan;
+    }
+
     @Override
     public CamelContext getCamelContext() {
         return camelContext;


[camel-kamelets] 02/28: Refine Kamelet data type solution with review comments

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit f71ca6e86b023a8c9ed417a2942898e28c0d2a24
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Thu Nov 17 12:06:45 2022 +0100

    Refine Kamelet data type solution with review comments
    
    - Cache converter in DataTypeProcessor so lookup is only done once
    - Add lazy loading of component converters via resource path lookup (DataTypeConverterResolver)
    - Only load standard converters via annotation package scan
---
 kamelets/aws-ddb-sink.kamelet.yaml                 |  2 +
 kamelets/aws-s3-source.kamelet.yaml                |  2 +
 .../utils/format/AnnotationDataTypeLoader.java     | 41 ++++++++---
 .../kamelets/utils/format/DataTypeProcessor.java   | 27 +++++--
 .../format/DefaultDataTypeConverterResolver.java   | 83 ++++++++++++++++++++++
 .../utils/format/DefaultDataTypeRegistry.java      | 64 ++++++++---------
 .../format/spi/DataTypeConverterResolver.java      | 49 +++++++++++++
 .../apache/camel/{DataType => DataTypeConverter}   |  4 +-
 .../{DataType => datatype/converter/aws2-ddb-json} |  4 +-
 .../converter/aws2-s3-binary}                      |  4 +-
 .../{DataType => datatype/converter/aws2-s3-json}  |  4 +-
 ...a => DefaultDataTypeConverterResolverTest.java} | 42 +++++++----
 .../utils/format/DefaultDataTypeRegistryTest.java  |  7 +-
 .../camel/datatype/converter/camel-jsonObject}     |  4 +-
 .../org/apache/camel/datatype/converter/foo-json}  |  4 +-
 .../resources/kamelets/aws-ddb-sink.kamelet.yaml   |  2 +
 .../resources/kamelets/aws-s3-source.kamelet.yaml  |  2 +
 17 files changed, 260 insertions(+), 85 deletions(-)

diff --git a/kamelets/aws-ddb-sink.kamelet.yaml b/kamelets/aws-ddb-sink.kamelet.yaml
index ba200347..a4e7a114 100644
--- a/kamelets/aws-ddb-sink.kamelet.yaml
+++ b/kamelets/aws-ddb-sink.kamelet.yaml
@@ -123,6 +123,8 @@ spec:
           value: 'aws2-ddb'
         - key: format
           value: '{{inputFormat}}'
+        - key: registry
+          value: '{{dataTypeRegistry}}'
     from:
       uri: "kamelet:source"
       steps:
diff --git a/kamelets/aws-s3-source.kamelet.yaml b/kamelets/aws-s3-source.kamelet.yaml
index e09cf4aa..a63af7dc 100644
--- a/kamelets/aws-s3-source.kamelet.yaml
+++ b/kamelets/aws-s3-source.kamelet.yaml
@@ -129,6 +129,8 @@ spec:
             value: 'aws2-s3'
           - key: format
             value: '{{outputFormat}}'
+          - key: registry
+            value: '{{dataTypeRegistry}}'
       - name: renameHeaders
         type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders"
         property:
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/AnnotationDataTypeLoader.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/AnnotationDataTypeLoader.java
index 96ca50eb..9b37c377 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/AnnotationDataTypeLoader.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/AnnotationDataTypeLoader.java
@@ -26,12 +26,15 @@ import java.util.Enumeration;
 import java.util.HashSet;
 import java.util.Set;
 
+import org.apache.camel.CamelContext;
+import org.apache.camel.CamelContextAware;
+import org.apache.camel.ExtendedCamelContext;
 import org.apache.camel.TypeConverterLoaderException;
+import org.apache.camel.impl.engine.DefaultPackageScanClassResolver;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeLoader;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeRegistry;
 import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
-import org.apache.camel.spi.Injector;
 import org.apache.camel.spi.PackageScanClassResolver;
 import org.apache.camel.util.IOHelper;
 import org.apache.camel.util.ObjectHelper;
@@ -41,25 +44,31 @@ import org.slf4j.LoggerFactory;
 /**
  * Data type loader scans packages for {@link DataTypeConverter} classes annotated with {@link DataType} annotation.
  */
-public class AnnotationDataTypeLoader implements DataTypeLoader {
+public class AnnotationDataTypeLoader implements DataTypeLoader, CamelContextAware {
 
-    public static final String META_INF_SERVICES = "META-INF/services/org/apache/camel/DataType";
+    public static final String META_INF_SERVICES = "META-INF/services/org/apache/camel/DataTypeConverter";
 
     private static final Logger LOG = LoggerFactory.getLogger(AnnotationDataTypeLoader.class);
 
-    protected final PackageScanClassResolver resolver;
-    protected final Injector injector;
+    private CamelContext camelContext;
+
+    protected PackageScanClassResolver resolver;
 
     protected Set<Class<?>> visitedClasses = new HashSet<>();
     protected Set<String> visitedURIs = new HashSet<>();
 
-    public AnnotationDataTypeLoader(Injector injector, PackageScanClassResolver resolver) {
-        this.injector = injector;
-        this.resolver = resolver;
-    }
-
     @Override
     public void load(DataTypeRegistry registry) {
+        ObjectHelper.notNull(camelContext, "camelContext");
+
+        if (resolver == null) {
+            if (camelContext instanceof ExtendedCamelContext) {
+                resolver = camelContext.adapt(ExtendedCamelContext.class).getPackageScanClassResolver();
+            } else {
+                resolver = new DefaultPackageScanClassResolver();
+            }
+        }
+
         Set<String> packages = new HashSet<>();
 
         LOG.trace("Searching for {} services", META_INF_SERVICES);
@@ -111,7 +120,7 @@ public class AnnotationDataTypeLoader implements DataTypeLoader {
         try {
             if (DataTypeConverter.class.isAssignableFrom(type) && type.isAnnotationPresent(DataType.class)) {
                 DataType dt = type.getAnnotation(DataType.class);
-                DataTypeConverter converter = (DataTypeConverter) injector.newInstance(type);
+                DataTypeConverter converter = (DataTypeConverter) camelContext.getInjector().newInstance(type);
                 registry.addDataTypeConverter(dt.scheme(), converter);
             }
         } catch (NoClassDefFoundError e) {
@@ -149,4 +158,14 @@ public class AnnotationDataTypeLoader implements DataTypeLoader {
             }
         }
     }
+
+    @Override
+    public void setCamelContext(CamelContext camelContext) {
+        this.camelContext = camelContext;
+    }
+
+    @Override
+    public CamelContext getCamelContext() {
+        return camelContext;
+    }
 }
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java
index 859269fe..81c58330 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java
@@ -17,11 +17,13 @@
 
 package org.apache.camel.kamelets.utils.format;
 
-import org.apache.camel.BeanInject;
+import java.util.Optional;
+
 import org.apache.camel.CamelContext;
 import org.apache.camel.CamelContextAware;
 import org.apache.camel.Exchange;
 import org.apache.camel.Processor;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
 
 /**
  * Processor applies data type conversion based on given format name. Searches for matching data type converter
@@ -31,20 +33,31 @@ public class DataTypeProcessor implements Processor, CamelContextAware {
 
     private CamelContext camelContext;
 
-    @BeanInject
-    private DefaultDataTypeRegistry dataTypeRegistry;
+    private DefaultDataTypeRegistry registry;
 
     private String scheme;
     private String format;
 
+    private DataTypeConverter converter;
+
     @Override
     public void process(Exchange exchange) throws Exception {
         if (format == null || format.isEmpty()) {
             return;
         }
 
-        dataTypeRegistry.lookup(scheme, format)
-                        .ifPresent(converter -> converter.convert(exchange));
+        doConverterLookup().ifPresent(converter -> converter.convert(exchange));
+    }
+
+    private Optional<DataTypeConverter> doConverterLookup() {
+        if (converter != null) {
+            return Optional.of(converter);
+        }
+
+        Optional<DataTypeConverter> maybeConverter = registry.lookup(scheme, format);
+        maybeConverter.ifPresent(dataTypeConverter -> this.converter = dataTypeConverter);
+
+        return maybeConverter;
     }
 
     public void setFormat(String format) {
@@ -55,6 +68,10 @@ public class DataTypeProcessor implements Processor, CamelContextAware {
         this.scheme = scheme;
     }
 
+    public void setRegistry(DefaultDataTypeRegistry dataTypeRegistry) {
+        this.registry = dataTypeRegistry;
+    }
+
     @Override
     public CamelContext getCamelContext() {
         return camelContext;
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java
new file mode 100644
index 00000000..85444a28
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format;
+
+import java.util.Optional;
+
+import org.apache.camel.CamelContext;
+import org.apache.camel.ExtendedCamelContext;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverterResolver;
+import org.apache.camel.spi.FactoryFinder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * The default implementation of {@link DataTypeConverterResolver} which tries to find components by using the URI scheme prefix
+ * and searching for a file of the URI scheme name in the <b>META-INF/services/org/apache/camel/datatype/converter/</b> directory
+ * on the classpath.
+ */
+public class DefaultDataTypeConverterResolver implements DataTypeConverterResolver {
+
+    public static final String RESOURCE_PATH = "META-INF/services/org/apache/camel/datatype/converter/";
+
+    private static final Logger LOG = LoggerFactory.getLogger(DefaultDataTypeConverterResolver.class);
+
+    private FactoryFinder factoryFinder;
+
+    @Override
+    public Optional<DataTypeConverter> resolve(String scheme, String name, CamelContext context) {
+        String converterName = String.format("%s-%s", scheme, name);
+        Class<?> type = findConverter(converterName, context);
+        if (type == null) {
+            // not found
+            return Optional.empty();
+        }
+
+        if (getLog().isDebugEnabled()) {
+            getLog().debug("Found data type converter: {} via type: {} via: {}{}", converterName,
+                    type.getName(), factoryFinder.getResourcePath(), converterName);
+        }
+
+        // create the converter instance
+        if (DataTypeConverter.class.isAssignableFrom(type)) {
+            try {
+                return Optional.of((DataTypeConverter) context.getInjector().newInstance(type));
+            } catch (NoClassDefFoundError e) {
+                LOG.debug("Ignoring converter type: {} as a dependent class could not be found: {}",
+                        type.getCanonicalName(), e, e);
+            }
+        } else {
+            throw new IllegalArgumentException("Type is not a DataTypeConverter implementation. Found: " + type.getName());
+        }
+
+        return Optional.empty();
+    }
+
+    private Class<?> findConverter(String name, CamelContext context) {
+        if (factoryFinder == null) {
+            factoryFinder = context.adapt(ExtendedCamelContext.class).getFactoryFinder(RESOURCE_PATH);
+        }
+        return factoryFinder.findClass(name).orElse(null);
+    }
+
+    protected Logger getLog() {
+        return LOG;
+    }
+
+}
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
index e7c6e3e8..7105fb4c 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
@@ -25,14 +25,11 @@ import java.util.Optional;
 
 import org.apache.camel.CamelContext;
 import org.apache.camel.CamelContextAware;
-import org.apache.camel.ExtendedCamelContext;
 import org.apache.camel.RuntimeCamelException;
-import org.apache.camel.impl.engine.DefaultInjector;
-import org.apache.camel.impl.engine.DefaultPackageScanClassResolver;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverterResolver;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeLoader;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeRegistry;
-import org.apache.camel.spi.PackageScanClassResolver;
 import org.apache.camel.support.service.ServiceSupport;
 
 /**
@@ -46,10 +43,10 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
 
     private CamelContext camelContext;
 
-    private PackageScanClassResolver resolver;
-
     protected final List<DataTypeLoader> dataTypeLoaders = new ArrayList<>();
 
+    private DataTypeConverterResolver dataTypeConverterResolver;
+
     private final Map<String, List<DataTypeConverter>> dataTypeConverters = new HashMap<>();
 
     @Override
@@ -71,30 +68,19 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
             return Optional.empty();
         }
 
-        Optional<DataTypeConverter> componentDataTypeConverter = getComponentDataTypeConverters(scheme).stream()
-                .filter(dtc -> name.equals(dtc.getName()))
-                .findFirst();
-
-        if (componentDataTypeConverter.isPresent()) {
-            return componentDataTypeConverter;
+        Optional<DataTypeConverter> dataTypeConverter = getDataTypeConverter(scheme, name);
+        if (!dataTypeConverter.isPresent()) {
+            dataTypeConverter = getDataTypeConverter("camel", name);
         }
 
-        return getDefaultDataTypeConverter(name);
+        return dataTypeConverter;
     }
 
     @Override
     protected void doInit() throws Exception {
         super.doInit();
 
-        if (resolver == null) {
-            if (camelContext != null) {
-                resolver = camelContext.adapt(ExtendedCamelContext.class).getPackageScanClassResolver();
-            } else {
-                resolver = new DefaultPackageScanClassResolver();
-            }
-        }
-
-        dataTypeLoaders.add(new AnnotationDataTypeLoader(new DefaultInjector(camelContext), resolver));
+        dataTypeLoaders.add(new AnnotationDataTypeLoader());
 
         addDataTypeConverter(new DefaultDataTypeConverter("string", String.class));
         addDataTypeConverter(new DefaultDataTypeConverter("binary", byte[].class));
@@ -113,20 +99,36 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
     }
 
     /**
-     * Retrieve default data output type from Camel context for given format name.
+     * Retrieve data type converter for given scheme and format name. First checks for matching bean in Camel registry then
+     * tries to get from local cache or perform lazy lookup.
+     * @param scheme
      * @param name
      * @return
      */
-    private Optional<DataTypeConverter> getDefaultDataTypeConverter(String name) {
-        Optional<DataTypeConverter> dataTypeConverter = getComponentDataTypeConverters("camel").stream()
+    private Optional<DataTypeConverter> getDataTypeConverter(String scheme, String name) {
+        if (dataTypeConverterResolver == null) {
+             dataTypeConverterResolver = Optional.ofNullable(camelContext.getRegistry().findSingleByType(DataTypeConverterResolver.class))
+                     .orElseGet(DefaultDataTypeConverterResolver::new);
+        }
+
+        // Looking for matching beans in Camel registry first
+        Optional<DataTypeConverter> dataTypeConverter = Optional.ofNullable(camelContext.getRegistry()
+                .lookupByNameAndType(String.format("%s-%s", scheme, name), DataTypeConverter.class));
+
+        if (!dataTypeConverter.isPresent()) {
+            // Try to retrieve converter from preloaded converters in local cache
+            dataTypeConverter = getComponentDataTypeConverters(scheme).stream()
                 .filter(dtc -> name.equals(dtc.getName()))
                 .findFirst();
+        }
 
-        if (dataTypeConverter.isPresent()) {
-            return dataTypeConverter;
+        if (!dataTypeConverter.isPresent()) {
+            // Try to lazy load converter via resource path lookup
+            dataTypeConverter = dataTypeConverterResolver.resolve(scheme, name, camelContext);
+            dataTypeConverter.ifPresent(converter -> getComponentDataTypeConverters(scheme).add(converter));
         }
 
-        return Optional.ofNullable(camelContext.getRegistry().lookupByNameAndType(name, DataTypeConverter.class));
+        return dataTypeConverter;
     }
 
     /**
@@ -135,11 +137,7 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
      * @return
      */
     private List<DataTypeConverter> getComponentDataTypeConverters(String scheme) {
-        if (!dataTypeConverters.containsKey(scheme)) {
-            dataTypeConverters.put(scheme, new ArrayList<>());
-        }
-
-        return dataTypeConverters.get(scheme);
+        return dataTypeConverters.computeIfAbsent(scheme, (s) -> new ArrayList<>());
     }
 
     @Override
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverterResolver.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverterResolver.java
new file mode 100644
index 00000000..17c48664
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverterResolver.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format.spi;
+
+import java.util.Optional;
+
+import org.apache.camel.CamelContext;
+
+/**
+ * Represents a resolver of data type converters from a URI to be able to lazy load them using some discovery mechanism.
+ */
+@FunctionalInterface
+public interface DataTypeConverterResolver {
+
+    /**
+     * Attempts to resolve the converter for the given URI.
+     *
+     * @param scheme
+     * @param name
+     * @param camelContext
+     * @return
+     */
+    Optional<DataTypeConverter> resolve(String scheme, String name, CamelContext camelContext);
+
+    /**
+     * Attempts to resolve default converter for the given name.
+     * @param name
+     * @param camelContext
+     * @return
+     */
+    default Optional<DataTypeConverter> resolve(String name, CamelContext camelContext) {
+        return resolve("camel", name, camelContext);
+    }
+}
diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter
similarity index 81%
copy from library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType
copy to library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter
index b51d3404..adf4eb63 100644
--- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType
+++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter
@@ -15,6 +15,4 @@
 # limitations under the License.
 #
 
-org.apache.camel.kamelets.utils.format.converter.standard
-org.apache.camel.kamelets.utils.format.converter.aws2.ddb
-org.apache.camel.kamelets.utils.format.converter.aws2.s3
\ No newline at end of file
+org.apache.camel.kamelets.utils.format.converter.standard
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-ddb-json
similarity index 81%
copy from library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType
copy to library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-ddb-json
index b51d3404..f0194cc4 100644
--- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType
+++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-ddb-json
@@ -15,6 +15,4 @@
 # limitations under the License.
 #
 
-org.apache.camel.kamelets.utils.format.converter.standard
-org.apache.camel.kamelets.utils.format.converter.aws2.ddb
-org.apache.camel.kamelets.utils.format.converter.aws2.s3
\ No newline at end of file
+class=org.apache.camel.kamelets.utils.format.converter.aws2.ddb.Ddb2JsonInputType
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary
similarity index 81%
copy from library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType
copy to library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary
index b51d3404..ba9c13f3 100644
--- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType
+++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary
@@ -15,6 +15,4 @@
 # limitations under the License.
 #
 
-org.apache.camel.kamelets.utils.format.converter.standard
-org.apache.camel.kamelets.utils.format.converter.aws2.ddb
-org.apache.camel.kamelets.utils.format.converter.aws2.s3
\ No newline at end of file
+class=org.apache.camel.kamelets.utils.format.converter.aws2.s3.AWS2S3BinaryOutputType
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-json
similarity index 81%
copy from library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType
copy to library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-json
index b51d3404..7a7c544f 100644
--- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType
+++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-json
@@ -15,6 +15,4 @@
 # limitations under the License.
 #
 
-org.apache.camel.kamelets.utils.format.converter.standard
-org.apache.camel.kamelets.utils.format.converter.aws2.ddb
-org.apache.camel.kamelets.utils.format.converter.aws2.s3
\ No newline at end of file
+class=org.apache.camel.kamelets.utils.format.converter.aws2.s3.AWS2S3JsonOutputType
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolverTest.java
similarity index 57%
copy from library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java
copy to library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolverTest.java
index 2ee4113e..1972b047 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolverTest.java
@@ -19,7 +19,7 @@ package org.apache.camel.kamelets.utils.format;
 
 import java.util.Optional;
 
-import org.apache.camel.CamelContextAware;
+import org.apache.camel.Exchange;
 import org.apache.camel.impl.DefaultCamelContext;
 import org.apache.camel.kamelets.utils.format.converter.standard.JsonModelDataType;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
@@ -27,31 +27,47 @@ import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
-class DefaultDataTypeRegistryTest {
+class DefaultDataTypeConverterResolverTest {
 
     private DefaultCamelContext camelContext;
 
-    private DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry();
+    private final DefaultDataTypeConverterResolver resolver = new DefaultDataTypeConverterResolver();
 
     @BeforeEach
     void setup() {
         this.camelContext = new DefaultCamelContext();
-        CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext);
     }
 
     @Test
-    public void shouldLookupDefaultDataTypeConverters() throws Exception {
-        Optional<DataTypeConverter> converter = dataTypeRegistry.lookup( "jsonObject");
+    public void shouldHandleUnresolvableDataTypeConverters() throws Exception {
+        Optional<DataTypeConverter> converter = resolver.resolve("unknown", camelContext);
+        Assertions.assertFalse(converter.isPresent());
+
+        converter = resolver.resolve("foo", "unknown", camelContext);
+        Assertions.assertFalse(converter.isPresent());
+    }
+
+    @Test
+    public void shouldResolveDataTypeConverters() throws Exception {
+        Optional<DataTypeConverter> converter = resolver.resolve("jsonObject", camelContext);
         Assertions.assertTrue(converter.isPresent());
         Assertions.assertEquals(JsonModelDataType.class, converter.get().getClass());
-        converter = dataTypeRegistry.lookup( "string");
-        Assertions.assertTrue(converter.isPresent());
-        Assertions.assertEquals(DefaultDataTypeConverter.class, converter.get().getClass());
-        Assertions.assertEquals(String.class, ((DefaultDataTypeConverter) converter.get()).getType());
-        converter = dataTypeRegistry.lookup( "binary");
+
+        converter = resolver.resolve("foo", "json", camelContext);
         Assertions.assertTrue(converter.isPresent());
-        Assertions.assertEquals(DefaultDataTypeConverter.class, converter.get().getClass());
-        Assertions.assertEquals(byte[].class, ((DefaultDataTypeConverter) converter.get()).getType());
+        Assertions.assertEquals(FooConverter.class, converter.get().getClass());
     }
 
+    public static class FooConverter implements DataTypeConverter {
+
+        @Override
+        public void convert(Exchange exchange) {
+            exchange.getMessage().setBody("Foo");
+        }
+
+        @Override
+        public String getName() {
+            return "foo";
+        }
+    }
 }
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java
index 2ee4113e..e077b369 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java
@@ -29,14 +29,11 @@ import org.junit.jupiter.api.Test;
 
 class DefaultDataTypeRegistryTest {
 
-    private DefaultCamelContext camelContext;
-
-    private DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry();
+    private final DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry();
 
     @BeforeEach
     void setup() {
-        this.camelContext = new DefaultCamelContext();
-        CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext);
+        CamelContextAware.trySetCamelContext(dataTypeRegistry, new DefaultCamelContext());
     }
 
     @Test
diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType b/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/camel-jsonObject
similarity index 81%
copy from library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType
copy to library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/camel-jsonObject
index b51d3404..2f725f6a 100644
--- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType
+++ b/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/camel-jsonObject
@@ -15,6 +15,4 @@
 # limitations under the License.
 #
 
-org.apache.camel.kamelets.utils.format.converter.standard
-org.apache.camel.kamelets.utils.format.converter.aws2.ddb
-org.apache.camel.kamelets.utils.format.converter.aws2.s3
\ No newline at end of file
+class=org.apache.camel.kamelets.utils.format.converter.standard.JsonModelDataType
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType b/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/foo-json
similarity index 81%
rename from library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType
rename to library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/foo-json
index b51d3404..ca7eaa02 100644
--- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType
+++ b/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/foo-json
@@ -15,6 +15,4 @@
 # limitations under the License.
 #
 
-org.apache.camel.kamelets.utils.format.converter.standard
-org.apache.camel.kamelets.utils.format.converter.aws2.ddb
-org.apache.camel.kamelets.utils.format.converter.aws2.s3
\ No newline at end of file
+class=org.apache.camel.kamelets.utils.format.DefaultDataTypeConverterResolverTest$FooConverter
\ No newline at end of file
diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml
index ba200347..a4e7a114 100644
--- a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml
+++ b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml
@@ -123,6 +123,8 @@ spec:
           value: 'aws2-ddb'
         - key: format
           value: '{{inputFormat}}'
+        - key: registry
+          value: '{{dataTypeRegistry}}'
     from:
       uri: "kamelet:source"
       steps:
diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml
index e09cf4aa..a63af7dc 100644
--- a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml
+++ b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml
@@ -129,6 +129,8 @@ spec:
             value: 'aws2-s3'
           - key: format
             value: '{{outputFormat}}'
+          - key: registry
+            value: '{{dataTypeRegistry}}'
       - name: renameHeaders
         type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders"
         property:


[camel-kamelets] 17/28: Set proper media types

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit 5595de757efb9c191c3ec06e6e97abf302252722
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Thu Nov 24 16:49:34 2022 +0100

    Set proper media types
---
 .../utils/format/DefaultDataTypeConverter.java     | 25 ++++++++++++++++++++-
 .../utils/format/DefaultDataTypeRegistry.java      |  7 +++---
 .../converter/aws2/ddb/Ddb2JsonInputType.java      |  2 +-
 .../converter/aws2/s3/AWS2S3BinaryOutputType.java  |  2 +-
 .../aws2/s3/AWS2S3CloudEventOutputType.java        |  2 +-
 .../converter/standard/JsonModelDataType.java      |  2 +-
 .../utils/format/spi/DataTypeConverter.java        | 26 +++++++++++++++++++++-
 .../utils/format/spi/annotations/DataType.java     |  4 +++-
 8 files changed, 60 insertions(+), 10 deletions(-)

diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java
index 11680b50..9f2c31ce 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java
@@ -19,6 +19,7 @@ package org.apache.camel.kamelets.utils.format;
 
 import org.apache.camel.Exchange;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
 
 /**
  * Default data type converter receives a name and a target type in order to use traditional exchange body conversion
@@ -26,14 +27,26 @@ import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
  */
 public class DefaultDataTypeConverter implements DataTypeConverter {
 
+    private final String scheme;
     private final String name;
+    private final String mediaType;
     private final Class<?> type;
 
-    public DefaultDataTypeConverter(String name, Class<?> type) {
+    public DefaultDataTypeConverter(String scheme, String name, String mediaType, Class<?> type) {
+        this.scheme = scheme;
         this.name = name;
+        this.mediaType = mediaType;
         this.type = type;
     }
 
+    public DefaultDataTypeConverter(String scheme, String name, Class<?> type) {
+        this(scheme, name, "", type);
+    }
+
+    public DefaultDataTypeConverter(String name, Class<?> type) {
+        this(DataType.DEFAULT_SCHEME, name, type);
+    }
+
     @Override
     public void convert(Exchange exchange) {
         if (type.isInstance(exchange.getMessage().getBody())) {
@@ -48,6 +61,16 @@ public class DefaultDataTypeConverter implements DataTypeConverter {
         return name;
     }
 
+    @Override
+    public String getScheme() {
+        return scheme;
+    }
+
+    @Override
+    public String getMediaType() {
+        return mediaType;
+    }
+
     public Class<?> getType() {
         return type;
     }
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
index d393e6c7..3d5b514e 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
@@ -30,6 +30,7 @@ import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverterResolver;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeLoader;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeRegistry;
+import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
 import org.apache.camel.support.CamelContextHelper;
 import org.apache.camel.support.service.ServiceSupport;
 import org.apache.camel.util.ObjectHelper;
@@ -87,7 +88,7 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
 
         Optional<DataTypeConverter> dataTypeConverter = getDataTypeConverter(scheme, name);
         if (!dataTypeConverter.isPresent()) {
-            dataTypeConverter = getDataTypeConverter("camel", name);
+            dataTypeConverter = getDataTypeConverter(DataType.DEFAULT_SCHEME, name);
         }
 
         return dataTypeConverter;
@@ -101,8 +102,8 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
             dataTypeLoaders.add(new AnnotationDataTypeLoader());
         }
 
-        addDataTypeConverter(new DefaultDataTypeConverter("string", String.class));
-        addDataTypeConverter(new DefaultDataTypeConverter("binary", byte[].class));
+        addDataTypeConverter(new DefaultDataTypeConverter(DataType.DEFAULT_SCHEME, "string", "text/plain", String.class));
+        addDataTypeConverter(new DefaultDataTypeConverter(DataType.DEFAULT_SCHEME, "binary", "application/octet-stream", byte[].class));
 
         for (DataTypeLoader loader : dataTypeLoaders) {
             CamelContextAware.trySetCamelContext(loader, getCamelContext());
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputType.java
index a15ff3a0..471e569f 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputType.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputType.java
@@ -77,7 +77,7 @@ import software.amazon.awssdk.services.dynamodb.model.ReturnValue;
  * In case key and item attribute value maps are identical you can omit the special top level properties completely. The
  * converter will map the whole Json body as is then and use it as source for the attribute value map.
  */
-@DataType(scheme = "aws2-ddb", name = "json")
+@DataType(scheme = "aws2-ddb", name = "json", mediaType = "application/json")
 public class Ddb2JsonInputType implements DataTypeConverter {
 
     private final JacksonDataFormat dataFormat = new JacksonDataFormat(new ObjectMapper(), JsonNode.class);
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java
index 6065ebd1..5f1fa0b8 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java
@@ -30,7 +30,7 @@ import software.amazon.awssdk.utils.IoUtils;
 /**
  * Binary output type.
  */
-@DataType(scheme = "aws2-s3", name = "binary")
+@DataType(scheme = "aws2-s3", name = "binary", mediaType = "application/octet-stream")
 public class AWS2S3BinaryOutputType implements DataTypeConverter {
 
     @Override
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
index 399e0111..2eb5cb04 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
@@ -33,7 +33,7 @@ import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
  * Output data type represents AWS S3 get object response as CloudEvent V1. The data type sets Camel specific
  * CloudEvent headers on the exchange.
  */
-@DataType(scheme = "aws2-s3", name = "cloudevents")
+@DataType(scheme = "aws2-s3", name = "cloudevents", mediaType = "application/octet-stream")
 public class AWS2S3CloudEventOutputType implements DataTypeConverter {
 
     @Override
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java
index d8d4ca4e..54c67785 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java
@@ -33,7 +33,7 @@ import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
  * <p/>
  * Unmarshal type should be given as a fully qualified class name in the exchange properties.
  */
-@DataType(name = "jsonObject")
+@DataType(name = "jsonObject", mediaType = "application/json")
 public class JsonModelDataType implements DataTypeConverter {
 
     public static final String DATA_TYPE_MODEL_PROPERTY = "CamelDataTypeModel";
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java
index d39d30f8..a275b67b 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java
@@ -26,7 +26,7 @@ public interface DataTypeConverter {
     void convert(Exchange exchange);
 
     /**
-     * Gets the data type converter name. Automatically derives the name from given type annotation.
+     * Gets the data type converter name. Automatically derives the name from given data type annotation.
      * @return
      */
     default String getName() {
@@ -36,4 +36,28 @@ public interface DataTypeConverter {
 
         throw new UnsupportedOperationException("Missing data type converter name");
     }
+
+    /**
+     * Gets the data type component scheme. Automatically derived from given data type annotation.
+     * @return
+     */
+    default String getScheme() {
+        if (this.getClass().isAnnotationPresent(DataType.class)) {
+            return this.getClass().getAnnotation(DataType.class).scheme();
+        }
+
+        return DataType.DEFAULT_SCHEME;
+    }
+
+    /**
+     * Gets the data type media type. Automatically derived from given data type annotation.
+     * @return
+     */
+    default String getMediaType() {
+        if (this.getClass().isAnnotationPresent(DataType.class)) {
+            return this.getClass().getAnnotation(DataType.class).mediaType();
+        }
+
+        return "";
+    }
 }
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java
index b1d4f5a9..40a3030a 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java
@@ -31,11 +31,13 @@ import java.lang.annotation.Target;
 @Target({ ElementType.TYPE })
 public @interface DataType {
 
+    String DEFAULT_SCHEME = "camel";
+
     /**
      * Camel component scheme.
      * @return
      */
-    String scheme() default "camel";
+    String scheme() default DEFAULT_SCHEME;
 
     /**
      * Data type name.


[camel-kamelets] 22/28: Enhance documentation on data type SPI

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit 429011c40a1ec53834975ae0134e71700edb0543
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Tue Nov 29 11:01:13 2022 +0100

    Enhance documentation on data type SPI
---
 .../utils/format/spi/DataTypeConverter.java        | 20 +++++++++---
 .../format/spi/DataTypeConverterResolver.java      | 26 ++++++++-------
 .../kamelets/utils/format/spi/DataTypeLoader.java  |  6 ++--
 .../utils/format/spi/DataTypeRegistry.java         | 38 +++++++++++++---------
 .../utils/format/spi/annotations/DataType.java     | 15 +++++----
 5 files changed, 66 insertions(+), 39 deletions(-)

diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java
index a275b67b..f9c175b0 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java
@@ -20,14 +20,23 @@ package org.apache.camel.kamelets.utils.format.spi;
 import org.apache.camel.Exchange;
 import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
 
+/**
+ * Converter applies custom logic to a given exchange in order to update the message content in that exchange according to
+ * the data type.
+ */
 @FunctionalInterface
 public interface DataTypeConverter {
 
+    /**
+     * Changes the exchange message content (body and/or header) to represent the data type.
+     * @param exchange the exchange that should have its message content applied to the data type.
+     */
     void convert(Exchange exchange);
 
     /**
-     * Gets the data type converter name. Automatically derives the name from given data type annotation.
-     * @return
+     * Gets the data type converter name. Automatically derives the name from given data type annotation if any.
+     * Subclasses may add a fallback logic to determine the data type name in case the annotation is missing.
+     * @return the name of the data type.
      */
     default String getName() {
         if (this.getClass().isAnnotationPresent(DataType.class)) {
@@ -39,7 +48,8 @@ public interface DataTypeConverter {
 
     /**
      * Gets the data type component scheme. Automatically derived from given data type annotation.
-     * @return
+     * Subclasses may add custom logic to determine the data type scheme. By default, the generic Camel scheme is used.
+     * @return the component scheme of the data type.
      */
     default String getScheme() {
         if (this.getClass().isAnnotationPresent(DataType.class)) {
@@ -51,7 +61,9 @@ public interface DataTypeConverter {
 
     /**
      * Gets the data type media type. Automatically derived from given data type annotation.
-     * @return
+     * Subclasses may add additional logic to determine the media type when annotation is missing.
+     * By default, returns empty String as a media type.
+     * @return the media type of the data type.
      */
     default String getMediaType() {
         if (this.getClass().isAnnotationPresent(DataType.class)) {
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverterResolver.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverterResolver.java
index 17c48664..f54aaa92 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverterResolver.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverterResolver.java
@@ -20,30 +20,34 @@ package org.apache.camel.kamelets.utils.format.spi;
 import java.util.Optional;
 
 import org.apache.camel.CamelContext;
+import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
 
 /**
- * Represents a resolver of data type converters from a URI to be able to lazy load them using some discovery mechanism.
+ * Resolves data type converters from URI to be able to lazy load converters using factory finder discovery mechanism.
  */
 @FunctionalInterface
 public interface DataTypeConverterResolver {
 
     /**
-     * Attempts to resolve the converter for the given URI.
+     * Attempts to resolve the converter for the given scheme and name. Usually uses the factory finder URI to resolve the converter.
+     * Scheme and name may be combined in order to resolve component specific converters. Usually implements a fallback
+     * resolving mechanism when no matching converter for scheme and name is found (e.g. search for generic Camel converters just using the name).
      *
-     * @param scheme
-     * @param name
-     * @param camelContext
-     * @return
+     * @param scheme the data type scheme.
+     * @param name the data type name.
+     * @param camelContext the current Camel context.
+     * @return optional data type resolved via URI factory finder.
      */
     Optional<DataTypeConverter> resolve(String scheme, String name, CamelContext camelContext);
 
     /**
-     * Attempts to resolve default converter for the given name.
-     * @param name
-     * @param camelContext
-     * @return
+     * Attempts to resolve default converter for the given name. Uses default Camel scheme to resolve the converter via factory finder mechanism.
+     *
+     * @param name the data type name.
+     * @param camelContext the current Camel context.
+     * @return optional data type resolved via URI factory finder.
      */
     default Optional<DataTypeConverter> resolve(String name, CamelContext camelContext) {
-        return resolve("camel", name, camelContext);
+        return resolve(DataType.DEFAULT_SCHEME, name, camelContext);
     }
 }
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeLoader.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeLoader.java
index 73f87c69..453485fe 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeLoader.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeLoader.java
@@ -18,14 +18,14 @@
 package org.apache.camel.kamelets.utils.format.spi;
 
 /**
- * A pluggable strategy to load data types into a {@link DataTypeRegistry}.
+ * A pluggable strategy to load data types into a {@link DataTypeRegistry}. Loads one to many data type converters to the given registry.
  */
 public interface DataTypeLoader {
 
     /**
-     * A pluggable strategy to load data types into a registry.
+     * A pluggable strategy to load data types into a given registry.
      *
-     * @param  registry the registry to load the data types into
+     * @param  registry the registry to load the data types into.
      */
     void load(DataTypeRegistry registry);
 }
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeRegistry.java
index cb2bedc9..d4718547 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeRegistry.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeRegistry.java
@@ -19,42 +19,50 @@ package org.apache.camel.kamelets.utils.format.spi;
 
 import java.util.Optional;
 
+import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
+
 /**
- * Registry for data types. Data type loaders should be used to add types to the registry.
+ * Registry for data types and its converters. Data type loaders should be used to add members to the registry.
  * <p/>
- * The registry is able to perform a lookup of a specific data type.
+ * The registry is able to perform a lookup of a specific data type by its given scheme and name. Usually data types are grouped
+ * by their component scheme so users may use component specific converters and default Camel converters.
  */
 public interface DataTypeRegistry {
 
     /**
-     * Registers a new default data type converter.
-     * @param scheme
-     * @param converter
+     * Registers a new default data type converter. Usually used to add default Camel converter implementations.
+     *
+     * @param scheme the data type scheme.
+     * @param converter the converter implementation.
      */
     void addDataTypeConverter(String scheme, DataTypeConverter converter);
 
     /**
-     * Registers a new default data type converter.
-     * @param converter
+     * Registers a new default data type converter. Uses the default Camel scheme to mark this converter as generic one.
+     *
+     * @param converter the data type converter implementation.
      */
     default void addDataTypeConverter(DataTypeConverter converter) {
-        addDataTypeConverter("camel", converter);
+        addDataTypeConverter(DataType.DEFAULT_SCHEME, converter);
     }
 
     /**
-     * Find data type for given component scheme and data type name.
-     * @param scheme
-     * @param name
-     * @return
+     * Find data type for given component scheme and data type name. Searches for the component scheme specific converter first.
+     * As a fallback may also try to resolve the converter with only the name in the given set of default Camel converters registered in this registry.
+     *
+     * @param scheme the data type converter scheme (usually a component scheme).
+     * @param name the data type converter name.
+     * @return optional data type converter implementation matching the given scheme and name.
      */
     Optional<DataTypeConverter> lookup(String scheme, String name);
 
     /**
-     * Find data type for given data type name.
-     * @param name
+     * Find data type for given data type name. Just searches the set of default Camel converter implementations registered in this registry.
+     *
+     * @param name the data type converter name.
      * @return
      */
     default Optional<DataTypeConverter> lookup(String name) {
-        return lookup("camel", name);
+        return lookup(DataType.DEFAULT_SCHEME, name);
     }
 }
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java
index 40a3030a..b5208887 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java
@@ -24,7 +24,10 @@ import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
 /**
- * Data type annotation defines a type with its component scheme, a name and input/output types.
+ * Data type annotation defines a data type with its component scheme, a name and optional media types.
+ * <p/>
+ * The annotation is used by specific classpath scanning data type loaders to automatically add the data types to
+ * a registry.
  */
 @Retention(RetentionPolicy.RUNTIME)
 @Documented
@@ -34,20 +37,20 @@ public @interface DataType {
     String DEFAULT_SCHEME = "camel";
 
     /**
-     * Camel component scheme.
-     * @return
+     * Camel component scheme. Specifies whether a data type is component specific.
+     * @return the data type scheme.
      */
     String scheme() default DEFAULT_SCHEME;
 
     /**
-     * Data type name.
-     * @return
+     * Data type name. Identifies the data type. Should be unique in combination with scheme.
+     * @return the data type name.
      */
     String name();
 
     /**
      * The media type associated with this data type.
-     * @return
+     * @return the media type or empty string as default.
      */
     String mediaType() default "";
 }


[camel-kamelets] 09/28: Preserve AWS S3 Key header as it is required during onCompletion

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit baba9ef6657ad3d8f5bbb7b99f9d53e6dbbeb223
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Fri Nov 18 17:28:11 2022 +0100

    Preserve AWS S3 Key header as it is required during onCompletion
---
 .../utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java      | 2 --
 .../utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java  | 2 +-
 2 files changed, 1 insertion(+), 3 deletions(-)

diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
index 655a4cef..13579054 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
@@ -50,8 +50,6 @@ public class AWS2S3CloudEventOutputType implements DataTypeConverter {
         if (encoding != null) {
             headers.put(CloudEvent.CAMEL_CLOUD_EVENT_DATA_CONTENT_ENCODING, encoding);
         }
-
-        exchange.getMessage().removeHeaders("CamelAwsS3*");
     }
 
     private String getEventTime(Exchange exchange) {
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java
index 10c51708..0a71f90d 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java
@@ -52,7 +52,7 @@ class AWS2S3CloudEventOutputTypeTest {
         outputType.convert(exchange);
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
-        Assertions.assertFalse(exchange.getMessage().getHeaders().containsKey(AWS2S3Constants.KEY));
+        Assertions.assertTrue(exchange.getMessage().getHeaders().containsKey(AWS2S3Constants.KEY));
         assertEquals("kamelet:aws-s3-source", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_TYPE));
         assertEquals("test1.txt", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_SUBJECT));
         assertEquals("myBucket", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_SOURCE));


[camel-kamelets] 08/28: Load S3 converters via annotation scan

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit cf2696045ecc055791599ab31993e26e3625be0e
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Fri Nov 18 15:26:17 2022 +0100

    Load S3 converters via annotation scan
---
 .../resources/META-INF/services/org/apache/camel/DataTypeConverter     | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter
index adf4eb63..46b63db2 100644
--- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter
+++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter
@@ -15,4 +15,5 @@
 # limitations under the License.
 #
 
-org.apache.camel.kamelets.utils.format.converter.standard
\ No newline at end of file
+org.apache.camel.kamelets.utils.format.converter.standard
+org.apache.camel.kamelets.utils.format.converter.aws2.s3


[camel-kamelets] 28/28: Include experimental Kamelets in the catalog

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit b1dec7f37c5e3e6ab7726a41c469c3b8d208e850
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Thu Dec 1 15:50:32 2022 +0100

    Include experimental Kamelets in the catalog
---
 .github/workflows/yaks-tests.yaml                              |  9 ++++-----
 .../aws-ddb-experimental-sink.kamelet.yaml                     | 10 ++++++----
 .../aws-s3-experimental-source.kamelet.yaml                    |  8 +++++---
 .../resources/kamelets/aws-ddb-experimental-sink.kamelet.yaml  | 10 ++++++----
 .../resources/kamelets/aws-s3-experimental-source.kamelet.yaml |  8 +++++---
 .../experimental/aws-ddb-sink-exp}/amazonDDBClient.groovy      |  0
 .../experimental/aws-ddb-sink-exp}/aws-ddb-sink-binding.yaml   |  4 ++--
 .../aws-ddb-sink-exp}/aws-ddb-sink-deleteItem.feature          | 10 +++++-----
 .../aws-ddb-sink-exp}/aws-ddb-sink-putItem.feature             | 10 +++++-----
 .../aws-ddb-sink-exp}/aws-ddb-sink-updateItem.feature          | 10 +++++-----
 .../experimental/aws-ddb-sink-exp}/putItem.groovy              |  0
 .../experimental/aws-ddb-sink-exp}/verifyItems.groovy          |  0
 .../experimental/aws-ddb-sink-exp}/yaks-config.yaml            |  4 ----
 .../experimental/aws-s3-exp}/amazonS3Client.groovy             |  0
 .../experimental/aws-s3-exp}/aws-s3-cloudevents.feature        | 10 +++++-----
 .../experimental/aws-s3-exp}/aws-s3-knative.feature            | 10 +++++-----
 .../experimental/aws-s3-exp}/aws-s3-to-knative.yaml            |  4 ++--
 .../aws-s3 => test/experimental/aws-s3-exp}/yaks-config.yaml   |  4 ----
 18 files changed, 55 insertions(+), 56 deletions(-)

diff --git a/.github/workflows/yaks-tests.yaml b/.github/workflows/yaks-tests.yaml
index 7f168ca2..a398b1a7 100644
--- a/.github/workflows/yaks-tests.yaml
+++ b/.github/workflows/yaks-tests.yaml
@@ -65,7 +65,6 @@ jobs:
 
         # Overwrite JitPack coordinates in the local Kamelets so the tests can use the utility classes in this PR
         find kamelets -maxdepth 1 -name '*.kamelet.yaml' -exec sed -i "s/github:apache.camel-kamelets:camel-kamelets-utils:${BASE_REF}-SNAPSHOT/github:${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF//\//'~'}-SNAPSHOT/g" {} +
-        find experimental -maxdepth 1 -name '*.kamelet.yaml' -exec sed -i "s/github:apache.camel-kamelets:camel-kamelets-utils:${BASE_REF}-SNAPSHOT/github:${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF//\//'~'}-SNAPSHOT/g" {} +
     - name: Get Camel K CLI
       run: |
         curl --fail -L --silent https://github.com/apache/camel-k/releases/download/v${CAMEL_K_VERSION}/camel-k-client-${CAMEL_K_VERSION}-linux-64bit.tar.gz -o kamel.tar.gz
@@ -110,7 +109,7 @@ jobs:
         yaks install --operator-image $YAKS_IMAGE_NAME:$YAKS_VERSION
     - name: YAKS Tests
       run: |
-        echo "Running tests"
+        echo "Running tests for Kamelets"
         yaks run test/aws-ddb-sink $YAKS_RUN_OPTIONS
         
         yaks run test/aws-s3 $YAKS_RUN_OPTIONS
@@ -122,11 +121,11 @@ jobs:
         yaks run test/earthquake-source $YAKS_RUN_OPTIONS
         yaks run test/rest-openapi-sink $YAKS_RUN_OPTIONS
         yaks run test/kafka $YAKS_RUN_OPTIONS
-    - name: YAKS Tests on experimental Kamelets
+    - name: YAKS Tests experimental Kamelets
       run: |
         echo "Running tests for experimental Kamelets"
-        yaks run experimental/test/aws-ddb-sink $YAKS_RUN_OPTIONS
-        yaks run experimental/test/aws-s3 $YAKS_RUN_OPTIONS
+        yaks run test/experimental/aws-ddb-sink-exp $YAKS_RUN_OPTIONS
+        yaks run test/experimental/aws-s3-exp $YAKS_RUN_OPTIONS
     - name: YAKS Report
       if: failure()
       run: |
diff --git a/experimental/aws-ddb-sink.exp.kamelet.yaml b/kamelets/aws-ddb-experimental-sink.kamelet.yaml
similarity index 95%
copy from experimental/aws-ddb-sink.exp.kamelet.yaml
copy to kamelets/aws-ddb-experimental-sink.kamelet.yaml
index e19185fa..a98ecb44 100644
--- a/experimental/aws-ddb-sink.exp.kamelet.yaml
+++ b/kamelets/aws-ddb-experimental-sink.kamelet.yaml
@@ -18,9 +18,9 @@
 apiVersion: camel.apache.org/v1alpha1
 kind: Kamelet
 metadata:
-  name: aws-ddb-sink-experimental
+  name: aws-ddb-experimental-sink
   annotations:
-    camel.apache.org/kamelet.support.level: "Experiemental"
+    camel.apache.org/kamelet.support.level: "Experimental"
     camel.apache.org/catalog.version: "main-SNAPSHOT"
     camel.apache.org/kamelet.icon: "data:image/svg+xml;base64,PHN2ZyBoZWlnaHQ9IjEwMCIgd2lkdGg9IjEwMCIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj48cGF0aCBmaWxsPSIjMkQ3MkI4IiBkPSJNNzQuMTc0IDMxLjgwN2w3LjQzNyA1LjM2N3YtNy42MDJsLTcuNDgtOC43NjV2MTAuOTU3bC4wNDMuMDE1eiIvPjxwYXRoIGZpbGw9IiM1Mjk0Q0YiIGQ9Ik01OS44MzggODUuNjY2bDE0LjI5My03LjE0NlYyMC43OTFsLTE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMjA1Qjk4IiBkPSJNMzkuNDk2IDg1LjY2NkwyNS4yMDMgNzguNTJWMjAuNzkxbDE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMkQ3Mk [...]
     camel.apache.org/provider: "Apache Software Foundation"
@@ -29,7 +29,7 @@ metadata:
     camel.apache.org/kamelet.type: "sink"
 spec:
   definition:
-    title: "AWS DynamoDB Sink"
+    title: "AWS DynamoDB Experimental Sink"
     description: |-
       Send data to Amazon DynamoDB. The sent data inserts, updates, or deletes an item on the specified AWS DynamoDB table.
 
@@ -37,7 +37,9 @@ spec:
 
       If you use the default credentials provider, the DynamoDB client loads the credentials through this provider and doesn't use the basic authentication method.
 
-      This Kamelet expects a JSON-formatted body and it must include the primary key values that define the DynamoDB item. The mapping between the JSON fields and table attribute values is done by key. For example, for  '{"username":"oscerd", "city":"Rome"}' input, the Kamelet inserts or update an item in the specified AWS DynamoDB table and sets the values for the 'username' and 'city' attributes. 
+      This Kamelet expects a JSON-formatted body and it must include the primary key values that define the DynamoDB item. The mapping between the JSON fields and table attribute values is done by key. For example, for  '{"username":"oscerd", "city":"Rome"}' input, the Kamelet inserts or update an item in the specified AWS DynamoDB table and sets the values for the 'username' and 'city' attributes.
+      
+      This Kamelet supports experimental input format to specify the data type that is given to this sink. The Kamelet will do best effort to convert the provided input type to the required input for the sink.
     required:
       - table
       - region
diff --git a/experimental/aws-s3-source.exp.kamelet.yaml b/kamelets/aws-s3-experimental-source.kamelet.yaml
similarity index 96%
copy from experimental/aws-s3-source.exp.kamelet.yaml
copy to kamelets/aws-s3-experimental-source.kamelet.yaml
index 7a8d8fe5..504157c1 100644
--- a/experimental/aws-s3-source.exp.kamelet.yaml
+++ b/kamelets/aws-s3-experimental-source.kamelet.yaml
@@ -1,7 +1,7 @@
 apiVersion: camel.apache.org/v1alpha1
 kind: Kamelet
 metadata:
-  name: aws-s3-source-experimental
+  name: aws-s3-experimental-source
   annotations:
     camel.apache.org/kamelet.support.level: "Experimental"
     camel.apache.org/catalog.version: "main-SNAPSHOT"
@@ -12,7 +12,7 @@ metadata:
     camel.apache.org/kamelet.type: "source"
 spec:
   definition:
-    title: "AWS S3 Source"
+    title: "AWS S3 Experimental Source"
     description: |-
       Receive data from an Amazon S3 Bucket.
 
@@ -20,7 +20,9 @@ spec:
       
       If you use the default credentials provider, the S3 client loads the credentials through this provider and doesn't use the basic authentication method.
 
-      Two headers will be duplicated with different names for clarity at sink level, CamelAwsS3Key will be duplicated into aws.s3.key and CamelAwsS3BucketName will be duplicated in aws.s3.bucket.name
+      Two headers will be duplicated with different names for clarity at sink level, CamelAwsS3Key will be duplicated into aws.s3.key and CamelAwsS3BucketName will be duplicated in aws.s3.bucket.name.
+      
+      This Kamelet supports experimental output format to specify the data type produced by this source. Users of the Kamelet are able to choose from different output types.
     required:
       - bucketNameOrArn
       - region
diff --git a/experimental/aws-ddb-sink.exp.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-experimental-sink.kamelet.yaml
similarity index 95%
rename from experimental/aws-ddb-sink.exp.kamelet.yaml
rename to library/camel-kamelets/src/main/resources/kamelets/aws-ddb-experimental-sink.kamelet.yaml
index e19185fa..a98ecb44 100644
--- a/experimental/aws-ddb-sink.exp.kamelet.yaml
+++ b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-experimental-sink.kamelet.yaml
@@ -18,9 +18,9 @@
 apiVersion: camel.apache.org/v1alpha1
 kind: Kamelet
 metadata:
-  name: aws-ddb-sink-experimental
+  name: aws-ddb-experimental-sink
   annotations:
-    camel.apache.org/kamelet.support.level: "Experiemental"
+    camel.apache.org/kamelet.support.level: "Experimental"
     camel.apache.org/catalog.version: "main-SNAPSHOT"
     camel.apache.org/kamelet.icon: "data:image/svg+xml;base64,PHN2ZyBoZWlnaHQ9IjEwMCIgd2lkdGg9IjEwMCIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj48cGF0aCBmaWxsPSIjMkQ3MkI4IiBkPSJNNzQuMTc0IDMxLjgwN2w3LjQzNyA1LjM2N3YtNy42MDJsLTcuNDgtOC43NjV2MTAuOTU3bC4wNDMuMDE1eiIvPjxwYXRoIGZpbGw9IiM1Mjk0Q0YiIGQ9Ik01OS44MzggODUuNjY2bDE0LjI5My03LjE0NlYyMC43OTFsLTE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMjA1Qjk4IiBkPSJNMzkuNDk2IDg1LjY2NkwyNS4yMDMgNzguNTJWMjAuNzkxbDE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMkQ3Mk [...]
     camel.apache.org/provider: "Apache Software Foundation"
@@ -29,7 +29,7 @@ metadata:
     camel.apache.org/kamelet.type: "sink"
 spec:
   definition:
-    title: "AWS DynamoDB Sink"
+    title: "AWS DynamoDB Experimental Sink"
     description: |-
       Send data to Amazon DynamoDB. The sent data inserts, updates, or deletes an item on the specified AWS DynamoDB table.
 
@@ -37,7 +37,9 @@ spec:
 
       If you use the default credentials provider, the DynamoDB client loads the credentials through this provider and doesn't use the basic authentication method.
 
-      This Kamelet expects a JSON-formatted body and it must include the primary key values that define the DynamoDB item. The mapping between the JSON fields and table attribute values is done by key. For example, for  '{"username":"oscerd", "city":"Rome"}' input, the Kamelet inserts or update an item in the specified AWS DynamoDB table and sets the values for the 'username' and 'city' attributes. 
+      This Kamelet expects a JSON-formatted body and it must include the primary key values that define the DynamoDB item. The mapping between the JSON fields and table attribute values is done by key. For example, for  '{"username":"oscerd", "city":"Rome"}' input, the Kamelet inserts or update an item in the specified AWS DynamoDB table and sets the values for the 'username' and 'city' attributes.
+      
+      This Kamelet supports experimental input format to specify the data type that is given to this sink. The Kamelet will do best effort to convert the provided input type to the required input for the sink.
     required:
       - table
       - region
diff --git a/experimental/aws-s3-source.exp.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-experimental-source.kamelet.yaml
similarity index 96%
rename from experimental/aws-s3-source.exp.kamelet.yaml
rename to library/camel-kamelets/src/main/resources/kamelets/aws-s3-experimental-source.kamelet.yaml
index 7a8d8fe5..504157c1 100644
--- a/experimental/aws-s3-source.exp.kamelet.yaml
+++ b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-experimental-source.kamelet.yaml
@@ -1,7 +1,7 @@
 apiVersion: camel.apache.org/v1alpha1
 kind: Kamelet
 metadata:
-  name: aws-s3-source-experimental
+  name: aws-s3-experimental-source
   annotations:
     camel.apache.org/kamelet.support.level: "Experimental"
     camel.apache.org/catalog.version: "main-SNAPSHOT"
@@ -12,7 +12,7 @@ metadata:
     camel.apache.org/kamelet.type: "source"
 spec:
   definition:
-    title: "AWS S3 Source"
+    title: "AWS S3 Experimental Source"
     description: |-
       Receive data from an Amazon S3 Bucket.
 
@@ -20,7 +20,9 @@ spec:
       
       If you use the default credentials provider, the S3 client loads the credentials through this provider and doesn't use the basic authentication method.
 
-      Two headers will be duplicated with different names for clarity at sink level, CamelAwsS3Key will be duplicated into aws.s3.key and CamelAwsS3BucketName will be duplicated in aws.s3.bucket.name
+      Two headers will be duplicated with different names for clarity at sink level, CamelAwsS3Key will be duplicated into aws.s3.key and CamelAwsS3BucketName will be duplicated in aws.s3.bucket.name.
+      
+      This Kamelet supports experimental output format to specify the data type produced by this source. Users of the Kamelet are able to choose from different output types.
     required:
       - bucketNameOrArn
       - region
diff --git a/experimental/test/aws-ddb-sink/amazonDDBClient.groovy b/test/experimental/aws-ddb-sink-exp/amazonDDBClient.groovy
similarity index 100%
rename from experimental/test/aws-ddb-sink/amazonDDBClient.groovy
rename to test/experimental/aws-ddb-sink-exp/amazonDDBClient.groovy
diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-binding.yaml
similarity index 95%
rename from experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml
rename to test/experimental/aws-ddb-sink-exp/aws-ddb-sink-binding.yaml
index 6b4b2b02..d1e5fb44 100644
--- a/experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml
+++ b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-binding.yaml
@@ -18,7 +18,7 @@
 apiVersion: camel.apache.org/v1alpha1
 kind: KameletBinding
 metadata:
-  name: aws-ddb-sink-binding
+  name: aws-ddb-experimental-sink-binding
 spec:
   source:
     ref:
@@ -39,7 +39,7 @@ spec:
     ref:
       kind: Kamelet
       apiVersion: camel.apache.org/v1alpha1
-      name: aws-ddb-sink-experimental
+      name: aws-ddb-experimental-sink
     properties:
       table: ${aws.ddb.tableName}
       operation: ${aws.ddb.operation}
diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-deleteItem.feature
similarity index 87%
rename from experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature
rename to test/experimental/aws-ddb-sink-exp/aws-ddb-sink-deleteItem.feature
index 6c54fdc3..d535b82f 100644
--- a/experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature
+++ b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-deleteItem.feature
@@ -18,7 +18,7 @@
 Feature: AWS DDB Sink - DeleteItem
 
   Background:
-    Given Kamelet aws-ddb-sink-experimental is available
+    Given Kamelet aws-ddb-experimental-sink is available
     Given Camel K resource polling configuration
       | maxAttempts          | 200   |
       | delayBetweenAttempts | 2000  |
@@ -48,9 +48,9 @@ Feature: AWS DDB Sink - DeleteItem
 
   Scenario: Create AWS-DDB Kamelet sink binding
     When load KameletBinding aws-ddb-sink-binding.yaml
-    And KameletBinding aws-ddb-sink-binding is available
-    And Camel K integration aws-ddb-sink-binding is running
-    And Camel K integration aws-ddb-sink-binding should print Routes startup
+    And KameletBinding aws-ddb-experimental-sink-binding is available
+    And Camel K integration aws-ddb-experimental-sink-binding is running
+    And Camel K integration aws-ddb-experimental-sink-binding should print Routes startup
     Then sleep 10sec
 
   Scenario: Verify Kamelet sink
@@ -59,7 +59,7 @@ Feature: AWS DDB Sink - DeleteItem
     Then run script verifyItems.groovy
 
   Scenario: Remove Camel K resources
-    Given delete KameletBinding aws-ddb-sink-binding
+    Given delete KameletBinding aws-ddb-experimental-sink-binding
 
   Scenario: Stop container
     Given stop LocalStack container
diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-putItem.feature
similarity index 86%
rename from experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature
rename to test/experimental/aws-ddb-sink-exp/aws-ddb-sink-putItem.feature
index f117889b..637b1dab 100644
--- a/experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature
+++ b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-putItem.feature
@@ -18,7 +18,7 @@
 Feature: AWS DDB Sink - PutItem
 
   Background:
-    Given Kamelet aws-ddb-sink-experimental is available
+    Given Kamelet aws-ddb-experimental-sink is available
     Given Camel K resource polling configuration
       | maxAttempts          | 200   |
       | delayBetweenAttempts | 2000  |
@@ -43,16 +43,16 @@ Feature: AWS DDB Sink - PutItem
 
   Scenario: Create AWS-DDB Kamelet sink binding
     When load KameletBinding aws-ddb-sink-binding.yaml
-    And KameletBinding aws-ddb-sink-binding is available
-    And Camel K integration aws-ddb-sink-binding is running
-    And Camel K integration aws-ddb-sink-binding should print Routes startup
+    And KameletBinding aws-ddb-experimental-sink-binding is available
+    And Camel K integration aws-ddb-experimental-sink-binding is running
+    And Camel K integration aws-ddb-experimental-sink-binding should print Routes startup
     Then sleep 10sec
 
   Scenario: Verify Kamelet sink
     Then run script verifyItems.groovy
 
   Scenario: Remove Camel K resources
-    Given delete KameletBinding aws-ddb-sink-binding
+    Given delete KameletBinding aws-ddb-experimental-sink-binding
 
   Scenario: Stop container
     Given stop LocalStack container
diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-updateItem.feature
similarity index 89%
rename from experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature
rename to test/experimental/aws-ddb-sink-exp/aws-ddb-sink-updateItem.feature
index 215adbe2..5a0a29c1 100644
--- a/experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature
+++ b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-updateItem.feature
@@ -18,7 +18,7 @@
 Feature: AWS DDB Sink - UpdateItem
 
   Background:
-    Given Kamelet aws-ddb-sink-experimental is available
+    Given Kamelet aws-ddb-experimental-sink is available
     Given Camel K resource polling configuration
       | maxAttempts          | 200   |
       | delayBetweenAttempts | 2000  |
@@ -50,9 +50,9 @@ Feature: AWS DDB Sink - UpdateItem
 
   Scenario: Create AWS-DDB Kamelet sink binding
     When load KameletBinding aws-ddb-sink-binding.yaml
-    And KameletBinding aws-ddb-sink-binding is available
-    And Camel K integration aws-ddb-sink-binding is running
-    And Camel K integration aws-ddb-sink-binding should print Routes startup
+    And KameletBinding aws-ddb-experimental-sink-binding is available
+    And Camel K integration aws-ddb-experimental-sink-binding is running
+    And Camel K integration aws-ddb-experimental-sink-binding should print Routes startup
     Then sleep 10sec
 
   Scenario: Verify Kamelet sink
@@ -62,7 +62,7 @@ Feature: AWS DDB Sink - UpdateItem
     Then run script verifyItems.groovy
 
   Scenario: Remove Camel K resources
-    Given delete KameletBinding aws-ddb-sink-binding
+    Given delete KameletBinding aws-ddb-experimental-sink-binding
 
   Scenario: Stop container
     Given stop LocalStack container
diff --git a/experimental/test/aws-ddb-sink/putItem.groovy b/test/experimental/aws-ddb-sink-exp/putItem.groovy
similarity index 100%
rename from experimental/test/aws-ddb-sink/putItem.groovy
rename to test/experimental/aws-ddb-sink-exp/putItem.groovy
diff --git a/experimental/test/aws-ddb-sink/verifyItems.groovy b/test/experimental/aws-ddb-sink-exp/verifyItems.groovy
similarity index 100%
rename from experimental/test/aws-ddb-sink/verifyItems.groovy
rename to test/experimental/aws-ddb-sink-exp/verifyItems.groovy
diff --git a/experimental/test/aws-ddb-sink/yaks-config.yaml b/test/experimental/aws-ddb-sink-exp/yaks-config.yaml
similarity index 93%
rename from experimental/test/aws-ddb-sink/yaks-config.yaml
rename to test/experimental/aws-ddb-sink-exp/yaks-config.yaml
index 51cf3b52..15156f08 100644
--- a/experimental/test/aws-ddb-sink/yaks-config.yaml
+++ b/test/experimental/aws-ddb-sink-exp/yaks-config.yaml
@@ -56,7 +56,3 @@ config:
     failedOnly: true
     includes:
       - app=camel-k
-pre:
-  - name: Install experimental Kamelets
-    run: |
-      kubectl apply -f ../../aws-ddb-sink.exp.kamelet.yaml -n $YAKS_NAMESPACE
diff --git a/experimental/test/aws-s3/amazonS3Client.groovy b/test/experimental/aws-s3-exp/amazonS3Client.groovy
similarity index 100%
rename from experimental/test/aws-s3/amazonS3Client.groovy
rename to test/experimental/aws-s3-exp/amazonS3Client.groovy
diff --git a/experimental/test/aws-s3/aws-s3-cloudevents.feature b/test/experimental/aws-s3-exp/aws-s3-cloudevents.feature
similarity index 86%
rename from experimental/test/aws-s3/aws-s3-cloudevents.feature
rename to test/experimental/aws-s3-exp/aws-s3-cloudevents.feature
index 6f5513fc..2ce2d0d6 100644
--- a/experimental/test/aws-s3/aws-s3-cloudevents.feature
+++ b/test/experimental/aws-s3-exp/aws-s3-cloudevents.feature
@@ -3,7 +3,7 @@
 Feature: AWS S3 Kamelet - cloud events data type
 
   Background:
-    Given Kamelet aws-s3-source-experimental is available
+    Given Kamelet aws-s3-experimental-source is available
     Given Knative event consumer timeout is 20000 ms
     Given Camel K resource polling configuration
       | maxAttempts          | 200   |
@@ -30,9 +30,9 @@ Feature: AWS S3 Kamelet - cloud events data type
   Scenario: Create AWS-S3 Kamelet to Knative binding
     Given variable loginfo is "Installed features"
     When load KameletBinding aws-s3-to-knative.yaml
-    And KameletBinding aws-s3-to-knative is available
-    And Camel K integration aws-s3-to-knative is running
-    Then Camel K integration aws-s3-to-knative should print ${loginfo}
+    And KameletBinding aws-s3-to-knative-binding is available
+    And Camel K integration aws-s3-to-knative-binding is running
+    Then Camel K integration aws-s3-to-knative-binding should print ${loginfo}
 
   Scenario: Verify Kamelet source
     Given create Knative event consumer service event-consumer-service
@@ -48,7 +48,7 @@ Feature: AWS S3 Kamelet - cloud events data type
       | id              | @ignore@ |
 
   Scenario: Remove Camel K resources
-    Given delete KameletBinding aws-s3-to-knative
+    Given delete KameletBinding aws-s3-to-knative-binding
     Given delete Kubernetes service event-consumer-service
 
   Scenario: Stop container
diff --git a/experimental/test/aws-s3/aws-s3-knative.feature b/test/experimental/aws-s3-exp/aws-s3-knative.feature
similarity index 85%
rename from experimental/test/aws-s3/aws-s3-knative.feature
rename to test/experimental/aws-s3-exp/aws-s3-knative.feature
index 8a6512a9..bb1bebd3 100644
--- a/experimental/test/aws-s3/aws-s3-knative.feature
+++ b/test/experimental/aws-s3-exp/aws-s3-knative.feature
@@ -3,7 +3,7 @@
 Feature: AWS S3 Kamelet - Knative binding
 
   Background:
-    Given Kamelet aws-s3-source-experimental is available
+    Given Kamelet aws-s3-experimental-source is available
     Given Knative event consumer timeout is 20000 ms
     Given Camel K resource polling configuration
       | maxAttempts          | 200   |
@@ -30,9 +30,9 @@ Feature: AWS S3 Kamelet - Knative binding
   Scenario: Create AWS-S3 Kamelet to Knative binding
     Given variable loginfo is "Installed features"
     When load KameletBinding aws-s3-to-knative.yaml
-    And KameletBinding aws-s3-to-knative is available
-    And Camel K integration aws-s3-to-knative is running
-    Then Camel K integration aws-s3-to-knative should print ${loginfo}
+    And KameletBinding aws-s3-to-knative-binding is available
+    And Camel K integration aws-s3-to-knative-binding is running
+    Then Camel K integration aws-s3-to-knative-binding should print ${loginfo}
 
   Scenario: Verify Kamelet source
     Given create Knative event consumer service event-consumer-service
@@ -47,7 +47,7 @@ Feature: AWS S3 Kamelet - Knative binding
       | id              | @ignore@ |
 
   Scenario: Remove Camel K resources
-    Given delete KameletBinding aws-s3-to-knative
+    Given delete KameletBinding aws-s3-to-knative-binding
     Given delete Kubernetes service event-consumer-service
     Given delete Knative broker default
 
diff --git a/experimental/test/aws-s3/aws-s3-to-knative.yaml b/test/experimental/aws-s3-exp/aws-s3-to-knative.yaml
similarity index 95%
rename from experimental/test/aws-s3/aws-s3-to-knative.yaml
rename to test/experimental/aws-s3-exp/aws-s3-to-knative.yaml
index afa1b572..117c3332 100644
--- a/experimental/test/aws-s3/aws-s3-to-knative.yaml
+++ b/test/experimental/aws-s3-exp/aws-s3-to-knative.yaml
@@ -18,13 +18,13 @@
 apiVersion: camel.apache.org/v1alpha1
 kind: KameletBinding
 metadata:
-  name: aws-s3-to-knative
+  name: aws-s3-to-knative-binding
 spec:
   source:
     ref:
       kind: Kamelet
       apiVersion: camel.apache.org/v1alpha1
-      name: aws-s3-source-experimental
+      name: aws-s3-experimental-source
     properties:
       bucketNameOrArn: ${aws.s3.bucketNameOrArn}
       overrideEndpoint: true
diff --git a/experimental/test/aws-s3/yaks-config.yaml b/test/experimental/aws-s3-exp/yaks-config.yaml
similarity index 94%
rename from experimental/test/aws-s3/yaks-config.yaml
rename to test/experimental/aws-s3-exp/yaks-config.yaml
index 6431eaf8..33d55aac 100644
--- a/experimental/test/aws-s3/yaks-config.yaml
+++ b/test/experimental/aws-s3-exp/yaks-config.yaml
@@ -63,7 +63,3 @@ config:
     failedOnly: true
     includes:
       - app=camel-k
-pre:
-  - name: Install experimental Kamelets
-    run: |
-      kubectl apply -f ../../aws-s3-source.exp.kamelet.yaml -n $YAKS_NAMESPACE


[camel-kamelets] 19/28: Remove camel-cloudevents dependency

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit e3912879c371e3f2450555cb2796a4b3d5648542
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Fri Nov 25 15:27:14 2022 +0100

    Remove camel-cloudevents dependency
    
    - Avoid having the additional dependency in favor of using plain String constants
---
 .github/workflows/yaks-tests.yaml                          |  5 +++++
 library/camel-kamelets-utils/pom.xml                       |  5 -----
 .../converter/aws2/s3/AWS2S3CloudEventOutputType.java      | 14 +++++++++-----
 .../converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java  |  7 +++----
 4 files changed, 17 insertions(+), 14 deletions(-)

diff --git a/.github/workflows/yaks-tests.yaml b/.github/workflows/yaks-tests.yaml
index e06d1751..4acd7c8a 100644
--- a/.github/workflows/yaks-tests.yaml
+++ b/.github/workflows/yaks-tests.yaml
@@ -109,7 +109,12 @@ jobs:
       run: |
         echo "Running tests"
         yaks run test/aws-ddb-sink $YAKS_RUN_OPTIONS
+        
         yaks run test/aws-s3/aws-s3-uri-binding.feature $YAKS_RUN_OPTIONS
+        yaks run test/aws-s3/aws-s3-source-property-conf.feature $YAKS_RUN_OPTIONS
+        yaks run test/aws-s3/aws-s3-source-secret-conf.feature $YAKS_RUN_OPTIONS
+        yaks run test/aws-s3/aws-s3-source-uri-conf.feature $YAKS_RUN_OPTIONS
+        
         yaks run test/extract-field-action $YAKS_RUN_OPTIONS
         yaks run test/insert-field-action $YAKS_RUN_OPTIONS
         yaks run test/mail-sink $YAKS_RUN_OPTIONS
diff --git a/library/camel-kamelets-utils/pom.xml b/library/camel-kamelets-utils/pom.xml
index 2aba210d..5b1441f3 100644
--- a/library/camel-kamelets-utils/pom.xml
+++ b/library/camel-kamelets-utils/pom.xml
@@ -82,11 +82,6 @@
             <artifactId>camel-aws2-s3</artifactId>
             <scope>provided</scope>
         </dependency>
-        <dependency>
-            <groupId>org.apache.camel</groupId>
-            <artifactId>camel-cloudevents</artifactId>
-            <!--<scope>provided</scope>-->
-        </dependency>
 
         <!-- Test scoped dependencies -->
         <dependency>
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
index 2eb5cb04..d1906f24 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
@@ -25,7 +25,6 @@ import java.util.Map;
 
 import org.apache.camel.Exchange;
 import org.apache.camel.component.aws2.s3.AWS2S3Constants;
-import org.apache.camel.component.cloudevents.CloudEvent;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
 import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
 
@@ -36,14 +35,19 @@ import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
 @DataType(scheme = "aws2-s3", name = "cloudevents", mediaType = "application/octet-stream")
 public class AWS2S3CloudEventOutputType implements DataTypeConverter {
 
+    static final String CAMEL_CLOUD_EVENT_TYPE = "CamelCloudEventType";
+    static final String CAMEL_CLOUD_EVENT_SOURCE = "CamelCloudEventSource";
+    static final String CAMEL_CLOUD_EVENT_SUBJECT = "CamelCloudEventSubject";
+    static final String CAMEL_CLOUD_EVENT_TIME = "CamelCloudEventTime";
+
     @Override
     public void convert(Exchange exchange) {
         final Map<String, Object> headers = exchange.getMessage().getHeaders();
 
-        headers.put(CloudEvent.CAMEL_CLOUD_EVENT_TYPE, "kamelet.aws.s3.source");
-        headers.put(CloudEvent.CAMEL_CLOUD_EVENT_SOURCE, exchange.getMessage().getHeader(AWS2S3Constants.BUCKET_NAME, String.class));
-        headers.put(CloudEvent.CAMEL_CLOUD_EVENT_SUBJECT, exchange.getMessage().getHeader(AWS2S3Constants.KEY, String.class));
-        headers.put(CloudEvent.CAMEL_CLOUD_EVENT_TIME, getEventTime(exchange));
+        headers.put(CAMEL_CLOUD_EVENT_TYPE, "kamelet.aws.s3.source");
+        headers.put(CAMEL_CLOUD_EVENT_SOURCE, exchange.getMessage().getHeader(AWS2S3Constants.BUCKET_NAME, String.class));
+        headers.put(CAMEL_CLOUD_EVENT_SUBJECT, exchange.getMessage().getHeader(AWS2S3Constants.KEY, String.class));
+        headers.put(CAMEL_CLOUD_EVENT_TIME, getEventTime(exchange));
     }
 
     private String getEventTime(Exchange exchange) {
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java
index e139b2b9..f2d41606 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java
@@ -24,7 +24,6 @@ import java.util.Optional;
 import org.apache.camel.CamelContextAware;
 import org.apache.camel.Exchange;
 import org.apache.camel.component.aws2.s3.AWS2S3Constants;
-import org.apache.camel.component.cloudevents.CloudEvents;
 import org.apache.camel.impl.DefaultCamelContext;
 import org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
@@ -53,9 +52,9 @@ class AWS2S3CloudEventOutputTypeTest {
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
         Assertions.assertTrue(exchange.getMessage().getHeaders().containsKey(AWS2S3Constants.KEY));
-        assertEquals("kamelet.aws.s3.source", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_TYPE));
-        assertEquals("test1.txt", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_SUBJECT));
-        assertEquals("myBucket", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_SOURCE));
+        assertEquals("kamelet.aws.s3.source", exchange.getMessage().getHeader(AWS2S3CloudEventOutputType.CAMEL_CLOUD_EVENT_TYPE));
+        assertEquals("test1.txt", exchange.getMessage().getHeader(AWS2S3CloudEventOutputType.CAMEL_CLOUD_EVENT_SUBJECT));
+        assertEquals("myBucket", exchange.getMessage().getHeader(AWS2S3CloudEventOutputType.CAMEL_CLOUD_EVENT_SOURCE));
     }
 
     @Test


[camel-kamelets] 23/28: Improve CloudEvents output produced by AWS S3 source

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit c64f31b0f6ebb7667aca034a7d67954356cde5ff
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Wed Nov 30 13:02:55 2022 +0100

    Improve CloudEvents output produced by AWS S3 source
    
    - Align with CloudEvents spec in creating proper event type and source values
    - Enable Knative YAKS tests
---
 .github/actions/install-knative/action.yml         |  26 ++++
 .github/actions/install-knative/install-knative.sh | 142 +++++++++++++++++++++
 .github/workflows/yaks-tests.yaml                  |   7 +-
 .../aws2/s3/AWS2S3CloudEventOutputType.java        |   4 +-
 .../aws2/s3/AWS2S3CloudEventOutputTypeTest.java    |   4 +-
 test/aws-s3/aws-s3-cloudevents.feature             |   8 +-
 test/aws-s3/aws-s3-knative-binding.feature         |  16 ++-
 test/aws-s3/aws-s3-knative.feature                 |   5 +
 test/aws-s3/yaks-config.yaml                       |   2 +-
 test/utils/knative-channel-to-log.yaml             |  34 +++++
 10 files changed, 230 insertions(+), 18 deletions(-)

diff --git a/.github/actions/install-knative/action.yml b/.github/actions/install-knative/action.yml
new file mode 100644
index 00000000..24dd36f9
--- /dev/null
+++ b/.github/actions/install-knative/action.yml
@@ -0,0 +1,26 @@
+# ---------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ---------------------------------------------------------------------------
+
+name: install-knative
+description: 'Install Knative serving and eventing'
+runs:
+  using: "composite"
+  steps:
+    - name: Install Knative
+      shell: bash
+      run: |
+        ./.github/actions/install-knative/install-knative.sh
diff --git a/.github/actions/install-knative/install-knative.sh b/.github/actions/install-knative/install-knative.sh
new file mode 100755
index 00000000..8434afc9
--- /dev/null
+++ b/.github/actions/install-knative/install-knative.sh
@@ -0,0 +1,142 @@
+#!/bin/bash
+
+# ---------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ---------------------------------------------------------------------------
+
+####
+#
+# Install the knative setup
+#
+####
+
+set -e
+
+# Prerequisites
+sudo wget https://github.com/mikefarah/yq/releases/download/v4.26.1/yq_linux_amd64 -O /usr/bin/yq && sudo chmod +x /usr/bin/yq
+
+set +e
+
+export SERVING_VERSION=knative-v1.6.0
+export EVENTING_VERSION=knative-v1.6.0
+export KOURIER_VERSION=knative-v1.6.0
+
+apply() {
+  local file="${1:-}"
+  if [ -z "${file}" ]; then
+    echo "Error: Cannot apply. No file."
+    exit 1
+  fi
+
+  kubectl apply --filename ${file}
+  if [ $? != 0 ]; then
+    sleep 5
+    echo "Re-applying ${file} ..."
+    kubectl apply --filename ${file}
+    if [ $? != 0 ]; then
+      echo "Error: Application of resource failed."
+      exit 1
+    fi
+  fi
+}
+
+SERVING_CRDS="https://github.com/knative/serving/releases/download/${SERVING_VERSION}/serving-crds.yaml"
+SERVING_CORE="https://github.com/knative/serving/releases/download/${SERVING_VERSION}/serving-core.yaml"
+KOURIER="https://github.com/knative-sandbox/net-kourier/releases/download/${KOURIER_VERSION}/kourier.yaml"
+EVENTING_CRDS="https://github.com/knative/eventing/releases/download/${EVENTING_VERSION}/eventing-crds.yaml"
+EVENTING_CORE="https://github.com/knative/eventing/releases/download/${EVENTING_VERSION}/eventing-core.yaml"
+IN_MEMORY_CHANNEL="https://github.com/knative/eventing/releases/download/${EVENTING_VERSION}/in-memory-channel.yaml"
+CHANNEL_BROKER="https://github.com/knative/eventing/releases/download/${EVENTING_VERSION}/mt-channel-broker.yaml"
+
+# Serving
+apply "${SERVING_CRDS}"
+
+YAML=$(mktemp serving-core-XXX.yaml)
+curl -L -s ${SERVING_CORE} | head -n -1 | yq e 'del(.spec.template.spec.containers[].resources)' - > ${YAML}
+if [ -s ${YAML} ]; then
+  apply ${YAML}
+  echo "Waiting for pods to be ready in knative-serving (dependency for kourier)"
+  kubectl wait --for=condition=Ready pod --all -n knative-serving --timeout=60s
+else
+  echo "Error: Failed to correctly download ${SERVING_CORE}"
+  exit 1
+fi
+
+# Kourier
+apply "${KOURIER}"
+
+sleep 5
+
+kubectl patch configmap/config-network \
+  --namespace knative-serving \
+  --type merge \
+  --patch '{"data":{"ingress.class":"kourier.ingress.networking.knative.dev"}}'
+if [ $? != 0 ]; then
+  echo "Error: Failed to patch configmap"
+  exit 1
+fi
+
+# Eventing
+apply "${EVENTING_CRDS}"
+
+YAML=$(mktemp eventing-XXX.yaml)
+curl -L -s ${EVENTING_CORE} | head -n -1 | yq e 'del(.spec.template.spec.containers[].resources)' - > ${YAML}
+if [ -s ${YAML} ]; then
+  apply ${YAML}
+else
+  echo "Error: Failed to correctly download ${EVENTING_CORE}"
+  exit 1
+fi
+
+# Eventing channels
+YAML=$(mktemp in-memory-XXX.yaml)
+curl -L -s ${IN_MEMORY_CHANNEL} | head -n -1 | yq e 'del(.spec.template.spec.containers[].resources)' - > ${YAML}
+if [ -s ${YAML} ]; then
+  apply ${YAML}
+else
+  echo "Error: Failed to correctly download ${IN_MEMORY_CHANNEL}"
+  exit 1
+fi
+
+# Eventing broker
+YAML=$(mktemp channel-broker-XXX.yaml)
+curl -L -s ${CHANNEL_BROKER} | head -n -1 | yq e 'del(.spec.template.spec.containers[].resources)' - > ${YAML}
+if [ -s ${YAML} ]; then
+  apply ${YAML}
+else
+  echo "Error: Failed to correctly download ${CHANNEL_BROKER}"
+  exit 1
+fi
+
+# Eventing sugar controller configuration
+echo "Patching Knative eventing configuration"
+kubectl patch configmap/config-sugar \
+  -n knative-eventing \
+  --type merge \
+  -p '{"data":{"namespace-selector":"{\"matchExpressions\":[{\"key\":\"eventing.knative.dev/injection\",\"operator\":\"In\",\"values\":[\"enabled\"]}]}"}}'
+
+kubectl patch configmap/config-sugar \
+  -n knative-eventing \
+  --type merge \
+  -p '{"data":{"trigger-selector":"{\"matchExpressions\":[{\"key\":\"eventing.knative.dev/injection\",\"operator\":\"In\",\"values\":[\"enabled\"]}]}"}}'
+
+# Wait for installation completed
+echo "Waiting for all pods to be ready in kourier-system"
+kubectl wait --for=condition=Ready pod --all -n kourier-system --timeout=60s
+echo "Waiting for all pods to be ready in knative-serving"
+kubectl wait --for=condition=Ready pod --all -n knative-serving --timeout=60s
+echo "Waiting for all pods to be ready in knative-eventing"
+kubectl wait --for=condition=Ready pod --all -n knative-eventing --timeout=60s
diff --git a/.github/workflows/yaks-tests.yaml b/.github/workflows/yaks-tests.yaml
index 4acd7c8a..73dcec77 100644
--- a/.github/workflows/yaks-tests.yaml
+++ b/.github/workflows/yaks-tests.yaml
@@ -91,6 +91,8 @@ jobs:
         kubectl version
         kubectl cluster-info
         kubectl describe nodes
+    - name: Install Knative
+      uses: ./.github/actions/install-knative
     - name: Install Camel K
       run: |
         # Configure install options
@@ -110,10 +112,7 @@ jobs:
         echo "Running tests"
         yaks run test/aws-ddb-sink $YAKS_RUN_OPTIONS
         
-        yaks run test/aws-s3/aws-s3-uri-binding.feature $YAKS_RUN_OPTIONS
-        yaks run test/aws-s3/aws-s3-source-property-conf.feature $YAKS_RUN_OPTIONS
-        yaks run test/aws-s3/aws-s3-source-secret-conf.feature $YAKS_RUN_OPTIONS
-        yaks run test/aws-s3/aws-s3-source-uri-conf.feature $YAKS_RUN_OPTIONS
+        yaks run test/aws-s3 $YAKS_RUN_OPTIONS
         
         yaks run test/extract-field-action $YAKS_RUN_OPTIONS
         yaks run test/insert-field-action $YAKS_RUN_OPTIONS
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
index d1906f24..4bc87192 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
@@ -44,8 +44,8 @@ public class AWS2S3CloudEventOutputType implements DataTypeConverter {
     public void convert(Exchange exchange) {
         final Map<String, Object> headers = exchange.getMessage().getHeaders();
 
-        headers.put(CAMEL_CLOUD_EVENT_TYPE, "kamelet.aws.s3.source");
-        headers.put(CAMEL_CLOUD_EVENT_SOURCE, exchange.getMessage().getHeader(AWS2S3Constants.BUCKET_NAME, String.class));
+        headers.put(CAMEL_CLOUD_EVENT_TYPE, "org.apache.camel.event.aws.s3.getObject");
+        headers.put(CAMEL_CLOUD_EVENT_SOURCE, "aws.s3.bucket." + exchange.getMessage().getHeader(AWS2S3Constants.BUCKET_NAME, String.class));
         headers.put(CAMEL_CLOUD_EVENT_SUBJECT, exchange.getMessage().getHeader(AWS2S3Constants.KEY, String.class));
         headers.put(CAMEL_CLOUD_EVENT_TIME, getEventTime(exchange));
     }
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java
index f2d41606..084f4c16 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java
@@ -52,9 +52,9 @@ class AWS2S3CloudEventOutputTypeTest {
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
         Assertions.assertTrue(exchange.getMessage().getHeaders().containsKey(AWS2S3Constants.KEY));
-        assertEquals("kamelet.aws.s3.source", exchange.getMessage().getHeader(AWS2S3CloudEventOutputType.CAMEL_CLOUD_EVENT_TYPE));
+        assertEquals("org.apache.camel.event.aws.s3.getObject", exchange.getMessage().getHeader(AWS2S3CloudEventOutputType.CAMEL_CLOUD_EVENT_TYPE));
         assertEquals("test1.txt", exchange.getMessage().getHeader(AWS2S3CloudEventOutputType.CAMEL_CLOUD_EVENT_SUBJECT));
-        assertEquals("myBucket", exchange.getMessage().getHeader(AWS2S3CloudEventOutputType.CAMEL_CLOUD_EVENT_SOURCE));
+        assertEquals("aws.s3.bucket.myBucket", exchange.getMessage().getHeader(AWS2S3CloudEventOutputType.CAMEL_CLOUD_EVENT_SOURCE));
     }
 
     @Test
diff --git a/test/aws-s3/aws-s3-cloudevents.feature b/test/aws-s3/aws-s3-cloudevents.feature
index 1e2f7d1e..52ac84c5 100644
--- a/test/aws-s3/aws-s3-cloudevents.feature
+++ b/test/aws-s3/aws-s3-cloudevents.feature
@@ -20,6 +20,10 @@ Feature: AWS S3 Kamelet - cloud events data type
     Given New global Camel context
     Given load to Camel registry amazonS3Client.groovy
 
+  Scenario: Create Knative broker
+    Given create Knative broker default
+    And Knative broker default is running
+
   Scenario: Create AWS-S3 Kamelet to Knative binding
     Given variable loginfo is "Installed features"
     When load KameletBinding aws-s3-to-knative.yaml
@@ -35,8 +39,8 @@ Feature: AWS S3 Kamelet - cloud events data type
     Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message}
     Then expect Knative event data: ${aws.s3.message}
     And verify Knative event
-      | type            | kamelet.aws.s3.source |
-      | source          | ${aws.s3.bucketNameOrArn} |
+      | type            | org.apache.camel.event.aws.s3.getObject |
+      | source          | aws.s3.bucket.${aws.s3.bucketNameOrArn} |
       | subject         | ${aws.s3.key} |
       | id              | @ignore@ |
 
diff --git a/test/aws-s3/aws-s3-knative-binding.feature b/test/aws-s3/aws-s3-knative-binding.feature
index c143bbee..e94ab715 100644
--- a/test/aws-s3/aws-s3-knative-binding.feature
+++ b/test/aws-s3/aws-s3-knative-binding.feature
@@ -17,31 +17,33 @@ Feature: AWS S3 Kamelet - binding to Knative
     Given New global Camel context
     Given load to Camel registry amazonS3Client.groovy
 
-  Scenario: Create Knative broker
+  Scenario: Create Knative broker and channel
     Given create Knative broker default
     And Knative broker default is running
+    And create Knative channel messages
 
   Scenario: Create AWS-S3 Kamelet to InMemoryChannel binding
     Given variable loginfo is "Installed features"
     Given load KameletBinding aws-s3-to-knative.yaml
-    Given load KameletBinding knative-to-log.yaml
+    Given load KameletBinding knative-channel-to-log.yaml
     Then KameletBinding aws-s3-to-knative should be available
-    And KameletBinding knative-to-log should be available
+    And KameletBinding knative-channel-to-log should be available
     And Camel K integration aws-s3-to-knative is running
-    And Camel K integration knative-to-log is running
+    And Camel K integration knative-channel-to-log is running
     And Camel K integration aws-s3-to-knative should print ${loginfo}
-    And Camel K integration knative-to-log should print ${loginfo}
+    And Camel K integration knative-channel-to-log should print ${loginfo}
     Then sleep 10000 ms
 
   Scenario: Verify Kamelet source
     Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}"
     Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message}
-    Then Camel K integration knative-to-log should print ${aws.s3.message}
+    Then Camel K integration knative-channel-to-log should print ${aws.s3.message}
 
   Scenario: Remove resources
     Given delete KameletBinding aws-s3-to-knative
-    Given delete KameletBinding knative-to-log
+    Given delete KameletBinding knative-channel-to-log
     Given delete Knative broker default
+    Given delete Knative channel messages
 
   Scenario: Stop container
     Given stop LocalStack container
diff --git a/test/aws-s3/aws-s3-knative.feature b/test/aws-s3/aws-s3-knative.feature
index 148ec1d6..fe080fa2 100644
--- a/test/aws-s3/aws-s3-knative.feature
+++ b/test/aws-s3/aws-s3-knative.feature
@@ -20,6 +20,10 @@ Feature: AWS S3 Kamelet - Knative binding
     Given New global Camel context
     Given load to Camel registry amazonS3Client.groovy
 
+  Scenario: Create Knative broker
+    Given create Knative broker default
+    And Knative broker default is running
+
   Scenario: Create AWS-S3 Kamelet to Knative binding
     Given variable loginfo is "Installed features"
     When load KameletBinding aws-s3-to-knative.yaml
@@ -42,6 +46,7 @@ Feature: AWS S3 Kamelet - Knative binding
   Scenario: Remove Camel K resources
     Given delete KameletBinding aws-s3-to-knative
     Given delete Kubernetes service event-consumer-service
+    Given delete Knative broker default
 
   Scenario: Stop container
     Given stop LocalStack container
diff --git a/test/aws-s3/yaks-config.yaml b/test/aws-s3/yaks-config.yaml
index 0d70ba75..4ef910c4 100644
--- a/test/aws-s3/yaks-config.yaml
+++ b/test/aws-s3/yaks-config.yaml
@@ -43,7 +43,7 @@ config:
       - aws-s3-to-log-secret-based.groovy
       - aws-s3-uri-binding.yaml
       - aws-s3-to-knative.yaml
-      - ../utils/knative-to-log.yaml
+      - ../utils/knative-channel-to-log.yaml
     cucumber:
       tags:
         - "not @ignored"
diff --git a/test/utils/knative-channel-to-log.yaml b/test/utils/knative-channel-to-log.yaml
new file mode 100644
index 00000000..4fc551c6
--- /dev/null
+++ b/test/utils/knative-channel-to-log.yaml
@@ -0,0 +1,34 @@
+# ---------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ---------------------------------------------------------------------------
+
+apiVersion: camel.apache.org/v1alpha1
+kind: KameletBinding
+metadata:
+  name: knative-channel-to-log
+spec:
+  source:
+    ref:
+      kind: InMemoryChannel
+      apiVersion: messaging.knative.dev/v1
+      name: messages
+  sink:
+    ref:
+      kind: Kamelet
+      apiVersion: camel.apache.org/v1alpha1
+      name: log-sink
+    properties:
+      showHeaders: true


[camel-kamelets] 25/28: Fix Knative YAKS tests

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit f6c2424908d3147424a25e134a88035ec5f8f5d1
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Wed Nov 30 17:45:51 2022 +0100

    Fix Knative YAKS tests
---
 test/aws-s3/aws-s3-cloudevents.feature     |  3 +-
 test/aws-s3/aws-s3-knative-binding.feature | 12 ++++----
 test/aws-s3/aws-s3-knative.feature         |  1 +
 test/aws-s3/aws-s3-to-knative-channel.yaml | 46 ++++++++++++++++++++++++++++++
 test/aws-s3/yaks-config.yaml               |  1 +
 5 files changed, 56 insertions(+), 7 deletions(-)

diff --git a/test/aws-s3/aws-s3-cloudevents.feature b/test/aws-s3/aws-s3-cloudevents.feature
index 52ac84c5..5774b738 100644
--- a/test/aws-s3/aws-s3-cloudevents.feature
+++ b/test/aws-s3/aws-s3-cloudevents.feature
@@ -1,3 +1,4 @@
+@knative
 Feature: AWS S3 Kamelet - cloud events data type
 
   Background:
@@ -34,7 +35,7 @@ Feature: AWS S3 Kamelet - cloud events data type
   Scenario: Verify Kamelet source
     Given create Knative event consumer service event-consumer-service
     Given create Knative trigger event-service-trigger on service event-consumer-service with filter on attributes
-      | type   | kamelet.aws.s3.source |
+      | type   | org.apache.camel.event.aws.s3.getObject |
     Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}"
     Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message}
     Then expect Knative event data: ${aws.s3.message}
diff --git a/test/aws-s3/aws-s3-knative-binding.feature b/test/aws-s3/aws-s3-knative-binding.feature
index e94ab715..cf67b4c9 100644
--- a/test/aws-s3/aws-s3-knative-binding.feature
+++ b/test/aws-s3/aws-s3-knative-binding.feature
@@ -1,5 +1,5 @@
 @knative
-Feature: AWS S3 Kamelet - binding to Knative
+Feature: AWS S3 Kamelet - binding to Knative channel
 
   Background:
     Given Kamelet aws-s3-source is available
@@ -24,13 +24,13 @@ Feature: AWS S3 Kamelet - binding to Knative
 
   Scenario: Create AWS-S3 Kamelet to InMemoryChannel binding
     Given variable loginfo is "Installed features"
-    Given load KameletBinding aws-s3-to-knative.yaml
+    Given load KameletBinding aws-s3-to-knative-channel.yaml
     Given load KameletBinding knative-channel-to-log.yaml
-    Then KameletBinding aws-s3-to-knative should be available
+    Then KameletBinding aws-s3-to-knative-channel should be available
     And KameletBinding knative-channel-to-log should be available
-    And Camel K integration aws-s3-to-knative is running
+    And Camel K integration aws-s3-to-knative-channel is running
     And Camel K integration knative-channel-to-log is running
-    And Camel K integration aws-s3-to-knative should print ${loginfo}
+    And Camel K integration aws-s3-to-knative-channel should print ${loginfo}
     And Camel K integration knative-channel-to-log should print ${loginfo}
     Then sleep 10000 ms
 
@@ -40,7 +40,7 @@ Feature: AWS S3 Kamelet - binding to Knative
     Then Camel K integration knative-channel-to-log should print ${aws.s3.message}
 
   Scenario: Remove resources
-    Given delete KameletBinding aws-s3-to-knative
+    Given delete KameletBinding aws-s3-to-knative-channel
     Given delete KameletBinding knative-channel-to-log
     Given delete Knative broker default
     Given delete Knative channel messages
diff --git a/test/aws-s3/aws-s3-knative.feature b/test/aws-s3/aws-s3-knative.feature
index fe080fa2..dc358797 100644
--- a/test/aws-s3/aws-s3-knative.feature
+++ b/test/aws-s3/aws-s3-knative.feature
@@ -1,3 +1,4 @@
+@knative
 Feature: AWS S3 Kamelet - Knative binding
 
   Background:
diff --git a/test/aws-s3/aws-s3-to-knative-channel.yaml b/test/aws-s3/aws-s3-to-knative-channel.yaml
new file mode 100644
index 00000000..5383ae61
--- /dev/null
+++ b/test/aws-s3/aws-s3-to-knative-channel.yaml
@@ -0,0 +1,46 @@
+# ---------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ---------------------------------------------------------------------------
+
+apiVersion: camel.apache.org/v1alpha1
+kind: KameletBinding
+metadata:
+  name: aws-s3-to-knative-channel
+spec:
+  source:
+    ref:
+      kind: Kamelet
+      apiVersion: camel.apache.org/v1alpha1
+      name: aws-s3-source
+    properties:
+      bucketNameOrArn: ${aws.s3.bucketNameOrArn}
+      overrideEndpoint: true
+      uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL}
+      accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}
+      secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}
+      region: ${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}
+  steps:
+    - ref:
+        kind: Kamelet
+        apiVersion: camel.apache.org/v1alpha1
+        name: log-sink
+      properties:
+        showHeaders: true
+  sink:
+    ref:
+      kind: InMemoryChannel
+      apiVersion: messaging.knative.dev/v1
+      name: messages
diff --git a/test/aws-s3/yaks-config.yaml b/test/aws-s3/yaks-config.yaml
index 4ef910c4..6f1a0d0d 100644
--- a/test/aws-s3/yaks-config.yaml
+++ b/test/aws-s3/yaks-config.yaml
@@ -43,6 +43,7 @@ config:
       - aws-s3-to-log-secret-based.groovy
       - aws-s3-uri-binding.yaml
       - aws-s3-to-knative.yaml
+      - aws-s3-to-knative-channel.yaml
       - ../utils/knative-channel-to-log.yaml
     cucumber:
       tags:


[camel-kamelets] 11/28: Load AWS DDB converters via annotation scan

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit f142f382207f1eda88eb56630b4026b0222123ce
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Fri Nov 18 19:52:07 2022 +0100

    Load AWS DDB converters via annotation scan
---
 .../main/resources/META-INF/services/org/apache/camel/DataTypeConverter  | 1 +
 1 file changed, 1 insertion(+)

diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter
index 46b63db2..81e10256 100644
--- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter
+++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter
@@ -17,3 +17,4 @@
 
 org.apache.camel.kamelets.utils.format.converter.standard
 org.apache.camel.kamelets.utils.format.converter.aws2.s3
+org.apache.camel.kamelets.utils.format.converter.aws2.ddb


[camel-kamelets] 27/28: Add experimental Kamelets using data type converter API

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit b45bb5dd8eeb5138a11d28c1e6ab497f3f427df7
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Wed Nov 30 22:19:41 2022 +0100

    Add experimental Kamelets using data type converter API
---
 .github/workflows/yaks-tests.yaml                  |   6 +
 experimental/aws-ddb-sink.exp.kamelet.yaml         | 146 ++++++++++++++++++
 experimental/aws-s3-source.exp.kamelet.yaml        | 165 +++++++++++++++++++++
 .../test/aws-ddb-sink/amazonDDBClient.groovy       |  53 +++++++
 .../test/aws-ddb-sink/aws-ddb-sink-binding.yaml    |  33 +++--
 .../aws-ddb-sink/aws-ddb-sink-deleteItem.feature   |  65 ++++++++
 .../test/aws-ddb-sink/aws-ddb-sink-putItem.feature |  58 ++++++++
 .../aws-ddb-sink/aws-ddb-sink-updateItem.feature   |  68 +++++++++
 experimental/test/aws-ddb-sink/putItem.groovy      |  30 ++++
 experimental/test/aws-ddb-sink/verifyItems.groovy  |  18 +++
 .../test/aws-ddb-sink}/yaks-config.yaml            |  29 ++--
 experimental/test/aws-s3/amazonS3Client.groovy     |  36 +++++
 .../test}/aws-s3/aws-s3-cloudevents.feature        |   2 +
 .../test}/aws-s3/aws-s3-knative.feature            |   2 +
 .../test}/aws-s3/aws-s3-to-knative.yaml            |   2 +-
 .../test}/aws-s3/yaks-config.yaml                  |  10 +-
 test/aws-s3/yaks-config.yaml                       |   1 -
 17 files changed, 682 insertions(+), 42 deletions(-)

diff --git a/.github/workflows/yaks-tests.yaml b/.github/workflows/yaks-tests.yaml
index 73dcec77..7f168ca2 100644
--- a/.github/workflows/yaks-tests.yaml
+++ b/.github/workflows/yaks-tests.yaml
@@ -65,6 +65,7 @@ jobs:
 
         # Overwrite JitPack coordinates in the local Kamelets so the tests can use the utility classes in this PR
         find kamelets -maxdepth 1 -name '*.kamelet.yaml' -exec sed -i "s/github:apache.camel-kamelets:camel-kamelets-utils:${BASE_REF}-SNAPSHOT/github:${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF//\//'~'}-SNAPSHOT/g" {} +
+        find experimental -maxdepth 1 -name '*.kamelet.yaml' -exec sed -i "s/github:apache.camel-kamelets:camel-kamelets-utils:${BASE_REF}-SNAPSHOT/github:${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF//\//'~'}-SNAPSHOT/g" {} +
     - name: Get Camel K CLI
       run: |
         curl --fail -L --silent https://github.com/apache/camel-k/releases/download/v${CAMEL_K_VERSION}/camel-k-client-${CAMEL_K_VERSION}-linux-64bit.tar.gz -o kamel.tar.gz
@@ -121,6 +122,11 @@ jobs:
         yaks run test/earthquake-source $YAKS_RUN_OPTIONS
         yaks run test/rest-openapi-sink $YAKS_RUN_OPTIONS
         yaks run test/kafka $YAKS_RUN_OPTIONS
+    - name: YAKS Tests on experimental Kamelets
+      run: |
+        echo "Running tests for experimental Kamelets"
+        yaks run experimental/test/aws-ddb-sink $YAKS_RUN_OPTIONS
+        yaks run experimental/test/aws-s3 $YAKS_RUN_OPTIONS
     - name: YAKS Report
       if: failure()
       run: |
diff --git a/experimental/aws-ddb-sink.exp.kamelet.yaml b/experimental/aws-ddb-sink.exp.kamelet.yaml
new file mode 100644
index 00000000..e19185fa
--- /dev/null
+++ b/experimental/aws-ddb-sink.exp.kamelet.yaml
@@ -0,0 +1,146 @@
+# ---------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ---------------------------------------------------------------------------
+
+apiVersion: camel.apache.org/v1alpha1
+kind: Kamelet
+metadata:
+  name: aws-ddb-sink-experimental
+  annotations:
+    camel.apache.org/kamelet.support.level: "Experimental"
+    camel.apache.org/catalog.version: "main-SNAPSHOT"
+    camel.apache.org/kamelet.icon: "data:image/svg+xml;base64,PHN2ZyBoZWlnaHQ9IjEwMCIgd2lkdGg9IjEwMCIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj48cGF0aCBmaWxsPSIjMkQ3MkI4IiBkPSJNNzQuMTc0IDMxLjgwN2w3LjQzNyA1LjM2N3YtNy42MDJsLTcuNDgtOC43NjV2MTAuOTU3bC4wNDMuMDE1eiIvPjxwYXRoIGZpbGw9IiM1Mjk0Q0YiIGQ9Ik01OS44MzggODUuNjY2bDE0LjI5My03LjE0NlYyMC43OTFsLTE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMjA1Qjk4IiBkPSJNMzkuNDk2IDg1LjY2NkwyNS4yMDMgNzguNTJWMjAuNzkxbDE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMkQ3Mk [...]
+    camel.apache.org/provider: "Apache Software Foundation"
+    camel.apache.org/kamelet.group: "AWS DynamoDB Streams"
+  labels:
+    camel.apache.org/kamelet.type: "sink"
+spec:
+  definition:
+    title: "AWS DynamoDB Sink"
+    description: |-
+      Send data to Amazon DynamoDB. The sent data inserts, updates, or deletes an item on the specified AWS DynamoDB table.
+
+      The basic authentication method for the AWS DynamoDB service is to specify an access key and a secret key. These parameters are optional because the Kamelet provides a default credentials provider.
+
+      If you use the default credentials provider, the DynamoDB client loads the credentials through this provider and doesn't use the basic authentication method.
+
+      This Kamelet expects a JSON-formatted body and it must include the primary key values that define the DynamoDB item. The mapping between the JSON fields and table attribute values is done by key. For example, for '{"username":"oscerd", "city":"Rome"}' input, the Kamelet inserts or updates an item in the specified AWS DynamoDB table and sets the values for the 'username' and 'city' attributes.
+    required:
+      - table
+      - region
+    type: object
+    properties:
+      table:
+        title: Table
+        description: The name of the DynamoDB table.
+        type: string
+      accessKey:
+        title: Access Key
+        description: The access key obtained from AWS.
+        type: string
+        format: password
+        x-descriptors:
+        - urn:alm:descriptor:com.tectonic.ui:password
+        - urn:camel:group:credentials
+      secretKey:
+        title: Secret Key
+        description: The secret key obtained from AWS.
+        type: string
+        format: password
+        x-descriptors:
+        - urn:alm:descriptor:com.tectonic.ui:password
+        - urn:camel:group:credentials
+      region:
+        title: AWS Region
+        description: The AWS region to access.
+        type: string
+        enum: ["ap-south-1", "eu-south-1", "us-gov-east-1", "me-central-1", "ca-central-1", "eu-central-1", "us-iso-west-1", "us-west-1", "us-west-2", "af-south-1", "eu-north-1", "eu-west-3", "eu-west-2", "eu-west-1", "ap-northeast-3", "ap-northeast-2", "ap-northeast-1", "me-south-1", "sa-east-1", "ap-east-1", "cn-north-1", "us-gov-west-1", "ap-southeast-1", "ap-southeast-2", "us-iso-east-1", "ap-southeast-3", "us-east-1", "us-east-2", "cn-northwest-1", "us-isob-east-1", "aws-global", "a [...]
+      operation:
+        title: Operation
+        description: "The operation to perform. The options are PutItem, UpdateItem, or DeleteItem."
+        type: string
+        default: PutItem
+        example: PutItem
+      writeCapacity:
+        title: Write Capacity
+        description: The provisioned throughput to reserve for writing resources to your table.
+        type: integer
+        default: 1
+      useDefaultCredentialsProvider:
+        title: Default Credentials Provider
+        description: If true, the DynamoDB client loads credentials through a default credentials provider. If false, it uses the basic authentication method (access key and secret key).
+        type: boolean
+        x-descriptors:
+          - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
+        default: false
+      uriEndpointOverride:
+        title: Overwrite Endpoint URI
+        description: The overriding endpoint URI. To use this option, you must also select the `overrideEndpoint` option.
+        type: string
+      overrideEndpoint:
+        title: Endpoint Overwrite
+        description: Select this option to override the endpoint URI. To use this option, you must also provide a URI for the `uriEndpointOverride` option.
+        type: boolean
+        x-descriptors:
+          - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
+        default: false
+      inputFormat:
+        title: Input Type
+        description: Specify the input type for this Kamelet. The Kamelet will automatically apply conversion logic in order to transform message content to this data type.
+        type: string
+        default: json
+        example: json
+  types:
+    in:
+      mediaType: application/json
+  dependencies:
+  - github:apache.camel-kamelets:camel-kamelets-utils:main-SNAPSHOT
+  - "camel:core"
+  - "camel:jackson"
+  - "camel:aws2-ddb"
+  - "camel:kamelet"
+  template:
+    beans:
+    - name: dataTypeRegistry
+      type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry"
+    - name: inputTypeProcessor
+      type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor"
+      property:
+        - key: scheme
+          value: 'aws2-ddb'
+        - key: format
+          value: '{{inputFormat}}'
+        - key: registry
+          value: '#bean:{{dataTypeRegistry}}'
+    from:
+      uri: "kamelet:source"
+      steps:
+      - set-property:
+          name: operation
+          constant: "{{operation}}"
+      - process:
+          ref: "{{inputTypeProcessor}}"
+      - to:
+          uri: "aws2-ddb:{{table}}"
+          parameters:
+            secretKey: "{{?secretKey}}"
+            accessKey: "{{?accessKey}}"
+            region: "{{region}}"
+            operation: "{{operation}}"
+            writeCapacity: "{{?writeCapacity}}"
+            useDefaultCredentialsProvider: "{{useDefaultCredentialsProvider}}"
+            uriEndpointOverride: "{{?uriEndpointOverride}}"
+            overrideEndpoint: "{{overrideEndpoint}}"
diff --git a/experimental/aws-s3-source.exp.kamelet.yaml b/experimental/aws-s3-source.exp.kamelet.yaml
new file mode 100644
index 00000000..7a8d8fe5
--- /dev/null
+++ b/experimental/aws-s3-source.exp.kamelet.yaml
@@ -0,0 +1,165 @@
+apiVersion: camel.apache.org/v1alpha1
+kind: Kamelet
+metadata:
+  name: aws-s3-source-experimental
+  annotations:
+    camel.apache.org/kamelet.support.level: "Experimental"
+    camel.apache.org/catalog.version: "main-SNAPSHOT"
+    camel.apache.org/kamelet.icon: "data:image/svg+xml;base64,PHN2ZyB2ZXJzaW9uPSIxLjEiIGlkPSJMYXllcl8xIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHg9IjAiIHk9IjAiIHZpZXdCb3g9IjAgMCAyNDguMiAzMDAiIHhtbDpzcGFjZT0icHJlc2VydmUiPjxzdHlsZT4uc3QyOHtmaWxsOiM4YzMxMjN9LnN0Mjl7ZmlsbDojZTA1MjQzfTwvc3R5bGU+PHBhdGggY2xhc3M9InN0MjgiIGQ9Ik0yMCA1Mi4xTDAgNjJ2MTc1LjVsMjAgOS45LjEtLjFWNTIuMmwtLjEtLjEiLz48cGF0aCBjbGFzcz0ic3QyOSIgZD0iTTEyNyAyMjJMMjAgMjQ3LjVWNTIuMUwxMjcgNzd2MTQ1Ii8+PHBhdGggY2xhc3M9InN0MjgiIG [...]
+    camel.apache.org/provider: "Apache Software Foundation"
+    camel.apache.org/kamelet.group: "AWS S3"
+  labels:
+    camel.apache.org/kamelet.type: "source"
+spec:
+  definition:
+    title: "AWS S3 Source"
+    description: |-
+      Receive data from an Amazon S3 Bucket.
+
+      The basic authentication method for the S3 service is to specify an access key and a secret key. These parameters are optional because the Kamelet provides a default credentials provider.
+      
+      If you use the default credentials provider, the S3 client loads the credentials through this provider and doesn't use the basic authentication method.
+
+      Two headers will be duplicated with different names for clarity at sink level: CamelAwsS3Key will be duplicated into aws.s3.key and CamelAwsS3BucketName will be duplicated into aws.s3.bucket.name
+    required:
+      - bucketNameOrArn
+      - region
+    type: object
+    properties:
+      bucketNameOrArn:
+        title: Bucket Name
+        description: The S3 Bucket name or Amazon Resource Name (ARN).
+        type: string
+      deleteAfterRead:
+        title: Auto-delete Objects
+        description: Specifies to delete objects after consuming them.
+        type: boolean
+        x-descriptors:
+        - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
+        default: true
+      accessKey:
+        title: Access Key
+        description: The access key obtained from AWS.
+        type: string
+        format: password
+        x-descriptors:
+        - urn:alm:descriptor:com.tectonic.ui:password
+        - urn:camel:group:credentials
+      secretKey:
+        title: Secret Key
+        description: The secret key obtained from AWS.
+        type: string
+        format: password
+        x-descriptors:
+        - urn:alm:descriptor:com.tectonic.ui:password
+        - urn:camel:group:credentials
+      region:
+        title: AWS Region
+        description: The AWS region to access.
+        type: string
+        enum: ["ap-south-1", "eu-south-1", "us-gov-east-1", "me-central-1", "ca-central-1", "eu-central-1", "us-iso-west-1", "us-west-1", "us-west-2", "af-south-1", "eu-north-1", "eu-west-3", "eu-west-2", "eu-west-1", "ap-northeast-3", "ap-northeast-2", "ap-northeast-1", "me-south-1", "sa-east-1", "ap-east-1", "cn-north-1", "us-gov-west-1", "ap-southeast-1", "ap-southeast-2", "us-iso-east-1", "ap-southeast-3", "us-east-1", "us-east-2", "cn-northwest-1", "us-isob-east-1", "aws-global", "a [...]
+      autoCreateBucket:
+        title: Autocreate Bucket
+        description: Specifies to automatically create the S3 bucket.
+        type: boolean
+        x-descriptors:
+        - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
+        default: false
+      includeBody:
+        title: Include Body
+        description: If true, the exchange is consumed and put into the body and closed. If false, the S3Object stream is put raw into the body and the headers are set with the S3 object metadata.
+        type: boolean
+        x-descriptors:
+        - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
+        default: true
+      prefix:
+        title: Prefix
+        description: The AWS S3 bucket prefix to consider while searching.
+        type: string
+        example: 'folder/'
+      ignoreBody:
+        title: Ignore Body
+        description: If true, the S3 Object body is ignored. Setting this to true overrides any behavior defined by the `includeBody` option. If false, the S3 object is put in the body.
+        type: boolean
+        x-descriptors:
+        - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
+        default: false
+      useDefaultCredentialsProvider:
+        title: Default Credentials Provider
+        description: If true, the S3 client loads credentials through a default credentials provider. If false, it uses the basic authentication method (access key and secret key).
+        type: boolean
+        x-descriptors:
+        - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
+        default: false
+      uriEndpointOverride:
+        title: Overwrite Endpoint URI
+        description: The overriding endpoint URI. To use this option, you must also select the `overrideEndpoint` option.
+        type: string
+      overrideEndpoint:
+        title: Endpoint Overwrite
+        description: Select this option to override the endpoint URI. To use this option, you must also provide a URI for the `uriEndpointOverride` option.
+        type: boolean
+        x-descriptors:
+          - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
+        default: false
+      delay:
+        title: Delay
+        description: The number of milliseconds before the next poll of the selected bucket.
+        type: integer
+        default: 500
+      outputFormat:
+        title: Output Type
+        description: Choose the output type for this Kamelet. The Kamelet supports different output types and performs automatic message conversion according to this data type.
+        type: string
+        default: binary
+        example: binary
+  dependencies:
+    - "camel:core"
+    - "camel:aws2-s3"
+    - "github:apache.camel-kamelets:camel-kamelets-utils:main-SNAPSHOT"
+    - "camel:kamelet"
+  template:
+    beans:
+      - name: dataTypeRegistry
+        type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry"
+      - name: outputTypeProcessor
+        type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor"
+        property:
+          - key: scheme
+            value: 'aws2-s3'
+          - key: format
+            value: '{{outputFormat}}'
+          - key: registry
+            value: '#bean:{{dataTypeRegistry}}'
+      - name: renameHeaders
+        type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders"
+        property:
+          - key: prefix
+            value: 'CamelAwsS3'
+          - key: renamingPrefix
+            value: 'aws.s3.'
+          - key: mode
+            value: 'filtering'
+          - key: selectedHeaders
+            value: 'CamelAwsS3Key,CamelAwsS3BucketName'
+    from:
+      uri: "aws2-s3:{{bucketNameOrArn}}"
+      parameters:
+        autoCreateBucket: "{{autoCreateBucket}}"
+        secretKey: "{{?secretKey}}"
+        accessKey: "{{?accessKey}}"
+        region: "{{region}}"
+        includeBody: "{{includeBody}}"
+        ignoreBody: "{{ignoreBody}}"
+        deleteAfterRead: "{{deleteAfterRead}}"
+        prefix: "{{?prefix}}"
+        useDefaultCredentialsProvider: "{{useDefaultCredentialsProvider}}"
+        uriEndpointOverride: "{{?uriEndpointOverride}}"
+        overrideEndpoint: "{{overrideEndpoint}}"
+        delay: "{{delay}}"
+      steps:
+      - process:
+          ref: "{{renameHeaders}}"
+      - process:
+          ref: "{{outputTypeProcessor}}"
+      - to: "kamelet:sink"
diff --git a/experimental/test/aws-ddb-sink/amazonDDBClient.groovy b/experimental/test/aws-ddb-sink/amazonDDBClient.groovy
new file mode 100644
index 00000000..dc0b2a8b
--- /dev/null
+++ b/experimental/test/aws-ddb-sink/amazonDDBClient.groovy
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider
+import software.amazon.awssdk.regions.Region
+import software.amazon.awssdk.services.dynamodb.DynamoDbClient
+import software.amazon.awssdk.services.dynamodb.model.AttributeDefinition
+import software.amazon.awssdk.services.dynamodb.model.KeySchemaElement
+import software.amazon.awssdk.services.dynamodb.model.KeyType
+import software.amazon.awssdk.services.dynamodb.model.ProvisionedThroughput
+import software.amazon.awssdk.services.dynamodb.model.ScalarAttributeType
+
+DynamoDbClient amazonDDBClient = DynamoDbClient
+        .builder()
+        .endpointOverride(URI.create("${YAKS_TESTCONTAINERS_LOCALSTACK_DYNAMODB_URL}"))
+        .credentialsProvider(StaticCredentialsProvider.create(
+                AwsBasicCredentials.create(
+                        "${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}",
+                        "${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}")
+        ))
+        .region(Region.of("${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}"))
+        .build()
+
+amazonDDBClient.createTable(b -> {
+        b.tableName("${aws.ddb.tableName}")
+        b.keySchema(
+                KeySchemaElement.builder().attributeName("id").keyType(KeyType.HASH).build(),
+        )
+        b.attributeDefinitions(
+                AttributeDefinition.builder().attributeName("id").attributeType(ScalarAttributeType.N).build(),
+        )
+        b.provisionedThroughput(
+                ProvisionedThroughput.builder()
+                        .readCapacityUnits(1L)
+                        .writeCapacityUnits(1L).build())
+})
+
+return amazonDDBClient
diff --git a/test/aws-s3/aws-s3-to-knative.yaml b/experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml
similarity index 75%
copy from test/aws-s3/aws-s3-to-knative.yaml
copy to experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml
index e99ee20f..6b4b2b02 100644
--- a/test/aws-s3/aws-s3-to-knative.yaml
+++ b/experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml
@@ -18,30 +18,33 @@
 apiVersion: camel.apache.org/v1alpha1
 kind: KameletBinding
 metadata:
-  name: aws-s3-to-knative
+  name: aws-ddb-sink-binding
 spec:
   source:
     ref:
       kind: Kamelet
       apiVersion: camel.apache.org/v1alpha1
-      name: aws-s3-source
+      name: timer-source
     properties:
-      bucketNameOrArn: ${aws.s3.bucketNameOrArn}
-      overrideEndpoint: true
-      outputFormat: ${aws.s3.output}
-      uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL}
-      accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}
-      secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}
-      region: ${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}
+      period: ${timer.source.period}
+      message: '${aws.ddb.json.data}'
   steps:
     - ref:
         kind: Kamelet
         apiVersion: camel.apache.org/v1alpha1
-        name: log-sink
-      properties:
-        showHeaders: true
+        name: log-action
+      properties:
+        showHeaders: true
   sink:
     ref:
-      kind: Broker
-      apiVersion: eventing.knative.dev/v1
-      name: default
+      kind: Kamelet
+      apiVersion: camel.apache.org/v1alpha1
+      name: aws-ddb-sink-experimental
+    properties:
+      table: ${aws.ddb.tableName}
+      operation: ${aws.ddb.operation}
+      overrideEndpoint: true
+      uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_DYNAMODB_URL}
+      accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}
+      secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}
+      region: ${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}
diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature b/experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature
new file mode 100644
index 00000000..6c54fdc3
--- /dev/null
+++ b/experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature
@@ -0,0 +1,65 @@
+# ---------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ---------------------------------------------------------------------------
+@experimental
+Feature: AWS DDB Sink - DeleteItem
+
+  Background:
+    Given Kamelet aws-ddb-sink-experimental is available
+    Given Camel K resource polling configuration
+      | maxAttempts          | 200   |
+      | delayBetweenAttempts | 2000  |
+    Given variables
+      | timer.source.period  | 10000 |
+      | aws.ddb.operation    | DeleteItem |
+      | aws.ddb.tableName    | movies |
+      | aws.ddb.item.id      | 1 |
+      | aws.ddb.item.year    | 1985 |
+      | aws.ddb.item.title   | Back to the future |
+      | aws.ddb.json.data    | {"id": ${aws.ddb.item.id}} |
+
+  Scenario: Start LocalStack container
+    Given Enable service DYNAMODB
+    Given start LocalStack container
+    And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}'
+
+  Scenario: Create AWS-DDB client
+    Given New global Camel context
+    Given load to Camel registry amazonDDBClient.groovy
+
+  Scenario: Create item on AWS-DDB
+    Given run script putItem.groovy
+    Given variables
+      | aws.ddb.items     | [{year=AttributeValue(N=${aws.ddb.item.year}), id=AttributeValue(N=${aws.ddb.item.id}), title=AttributeValue(S=${aws.ddb.item.title})}] |
+    Then run script verifyItems.groovy
+
+  Scenario: Create AWS-DDB Kamelet sink binding
+    When load KameletBinding aws-ddb-sink-binding.yaml
+    And KameletBinding aws-ddb-sink-binding is available
+    And Camel K integration aws-ddb-sink-binding is running
+    And Camel K integration aws-ddb-sink-binding should print Routes startup
+    Then sleep 10sec
+
+  Scenario: Verify Kamelet sink
+    Given variables
+      | aws.ddb.items     | [] |
+    Then run script verifyItems.groovy
+
+  Scenario: Remove Camel K resources
+    Given delete KameletBinding aws-ddb-sink-binding
+
+  Scenario: Stop container
+    Given stop LocalStack container
diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature b/experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature
new file mode 100644
index 00000000..f117889b
--- /dev/null
+++ b/experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature
@@ -0,0 +1,58 @@
+# ---------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ---------------------------------------------------------------------------
+@experimental
+Feature: AWS DDB Sink - PutItem
+
+  Background:
+    Given Kamelet aws-ddb-sink-experimental is available
+    Given Camel K resource polling configuration
+      | maxAttempts          | 200   |
+      | delayBetweenAttempts | 2000  |
+    Given variables
+      | timer.source.period  | 10000 |
+      | aws.ddb.operation    | PutItem |
+      | aws.ddb.tableName    | movies |
+      | aws.ddb.item.id      | 1 |
+      | aws.ddb.item.year    | 1977 |
+      | aws.ddb.item.title   | Star Wars IV |
+      | aws.ddb.json.data    | { "id":${aws.ddb.item.id}, "year":${aws.ddb.item.year}, "title":"${aws.ddb.item.title}" } |
+      | aws.ddb.items        | [{year=AttributeValue(N=${aws.ddb.item.year}), id=AttributeValue(N=${aws.ddb.item.id}), title=AttributeValue(S=${aws.ddb.item.title})}] |
+
+  Scenario: Start LocalStack container
+    Given Enable service DYNAMODB
+    Given start LocalStack container
+    And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}'
+
+  Scenario: Create AWS-DDB client
+    Given New global Camel context
+    Given load to Camel registry amazonDDBClient.groovy
+
+  Scenario: Create AWS-DDB Kamelet sink binding
+    When load KameletBinding aws-ddb-sink-binding.yaml
+    And KameletBinding aws-ddb-sink-binding is available
+    And Camel K integration aws-ddb-sink-binding is running
+    And Camel K integration aws-ddb-sink-binding should print Routes startup
+    Then sleep 10sec
+
+  Scenario: Verify Kamelet sink
+    Then run script verifyItems.groovy
+
+  Scenario: Remove Camel K resources
+    Given delete KameletBinding aws-ddb-sink-binding
+
+  Scenario: Stop container
+    Given stop LocalStack container
diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature b/experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature
new file mode 100644
index 00000000..215adbe2
--- /dev/null
+++ b/experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature
@@ -0,0 +1,68 @@
+# ---------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ---------------------------------------------------------------------------
+@experimental
+Feature: AWS DDB Sink - UpdateItem
+
+  Background:
+    Given Kamelet aws-ddb-sink-experimental is available
+    Given Camel K resource polling configuration
+      | maxAttempts          | 200   |
+      | delayBetweenAttempts | 2000  |
+    Given variables
+      | timer.source.period    | 10000 |
+      | aws.ddb.operation      | UpdateItem |
+      | aws.ddb.tableName      | movies |
+      | aws.ddb.item.id        | 1 |
+      | aws.ddb.item.year      | 1933 |
+      | aws.ddb.item.title     | King Kong |
+      | aws.ddb.item.title.new | King Kong - Historical |
+      | aws.ddb.item.directors | ["Merian C. Cooper", "Ernest B. Schoedsack"] |
+      | aws.ddb.json.data      | { "key": {"id": ${aws.ddb.item.id}}, "item": {"title": "${aws.ddb.item.title.new}", "year": ${aws.ddb.item.year}, "directors": ${aws.ddb.item.directors}} } |
+
+  Scenario: Start LocalStack container
+    Given Enable service DYNAMODB
+    Given start LocalStack container
+    And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}'
+
+  Scenario: Create AWS-DDB client
+    Given New global Camel context
+    Given load to Camel registry amazonDDBClient.groovy
+
+  Scenario: Create item on AWS-DDB
+    Given run script putItem.groovy
+    Given variables
+      | aws.ddb.items | [{year=AttributeValue(N=${aws.ddb.item.year}), id=AttributeValue(N=${aws.ddb.item.id}), title=AttributeValue(S=${aws.ddb.item.title})}] |
+    Then run script verifyItems.groovy
+
+  Scenario: Create AWS-DDB Kamelet sink binding
+    When load KameletBinding aws-ddb-sink-binding.yaml
+    And KameletBinding aws-ddb-sink-binding is available
+    And Camel K integration aws-ddb-sink-binding is running
+    And Camel K integration aws-ddb-sink-binding should print Routes startup
+    Then sleep 10sec
+
+  Scenario: Verify Kamelet sink
+    Given variables
+      | aws.ddb.item.directors | [Ernest B. Schoedsack, Merian C. Cooper] |
+      | aws.ddb.items | [{year=AttributeValue(N=${aws.ddb.item.year}), directors=AttributeValue(SS=${aws.ddb.item.directors}), id=AttributeValue(N=${aws.ddb.item.id}), title=AttributeValue(S=${aws.ddb.item.title.new})}] |
+    Then run script verifyItems.groovy
+
+  Scenario: Remove Camel K resources
+    Given delete KameletBinding aws-ddb-sink-binding
+
+  Scenario: Stop container
+    Given stop LocalStack container
diff --git a/experimental/test/aws-ddb-sink/putItem.groovy b/experimental/test/aws-ddb-sink/putItem.groovy
new file mode 100644
index 00000000..fd482f90
--- /dev/null
+++ b/experimental/test/aws-ddb-sink/putItem.groovy
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import software.amazon.awssdk.services.dynamodb.model.AttributeValue
+import software.amazon.awssdk.services.dynamodb.model.ReturnValue
+
+Map<String, AttributeValue> item = new HashMap<>()
+item.put("id", AttributeValue.builder().n("${aws.ddb.item.id}").build())
+item.put("year", AttributeValue.builder().n("${aws.ddb.item.year}").build())
+item.put("title", AttributeValue.builder().s("${aws.ddb.item.title}").build())
+
+amazonDDBClient.putItem(b -> {
+    b.tableName("${aws.ddb.tableName}")
+    b.item(item)
+    b.returnValues(ReturnValue.ALL_OLD)
+})
diff --git a/experimental/test/aws-ddb-sink/verifyItems.groovy b/experimental/test/aws-ddb-sink/verifyItems.groovy
new file mode 100644
index 00000000..b6e9d27c
--- /dev/null
+++ b/experimental/test/aws-ddb-sink/verifyItems.groovy
@@ -0,0 +1,18 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+assert "${aws.ddb.items}".equals(amazonDDBClient.scan(b -> b.tableName("${aws.ddb.tableName}"))?.items()?.toString())
diff --git a/test/aws-s3/yaks-config.yaml b/experimental/test/aws-ddb-sink/yaks-config.yaml
similarity index 73%
copy from test/aws-s3/yaks-config.yaml
copy to experimental/test/aws-ddb-sink/yaks-config.yaml
index 6f1a0d0d..51cf3b52 100644
--- a/test/aws-s3/yaks-config.yaml
+++ b/experimental/test/aws-ddb-sink/yaks-config.yaml
@@ -28,38 +28,25 @@ config:
         value: false
       - name: YAKS_KAMELETS_AUTO_REMOVE_RESOURCES
         value: false
-      - name: YAKS_KUBERNETES_AUTO_REMOVE_RESOURCES
-        value: false
-      - name: YAKS_KNATIVE_AUTO_REMOVE_RESOURCES
-        value: false
       - name: YAKS_TESTCONTAINERS_AUTO_REMOVE_RESOURCES
         value: false
       - name: CITRUS_TYPE_CONVERTER
         value: camel
     resources:
-      - amazonS3Client.groovy
-      - aws-s3-credentials.properties
-      - aws-s3-to-log-uri-based.groovy
-      - aws-s3-to-log-secret-based.groovy
-      - aws-s3-uri-binding.yaml
-      - aws-s3-to-knative.yaml
-      - aws-s3-to-knative-channel.yaml
-      - ../utils/knative-channel-to-log.yaml
+      - putItem.groovy
+      - verifyItems.groovy
+      - amazonDDBClient.groovy
+      - aws-ddb-sink-binding.yaml
     cucumber:
       tags:
         - "not @ignored"
     settings:
-      loggers:
-        - name: Logger.Message_IN
-          level: DEBUG
-        - name: Logger.Message_OUT
-          level: DEBUG
       dependencies:
         - groupId: com.amazonaws
-          artifactId: aws-java-sdk-kinesis
+          artifactId: aws-java-sdk-dynamodb
           version: "@aws-java-sdk.version@"
         - groupId: org.apache.camel
-          artifactId: camel-aws2-s3
+          artifactId: camel-aws2-ddb
           version: "@camel.version@"
         - groupId: org.apache.camel
           artifactId: camel-jackson
@@ -69,3 +56,7 @@ config:
     failedOnly: true
     includes:
       - app=camel-k
+pre:
+  - name: Install experimental Kamelets
+    run: |
+      kubectl apply -f ../../aws-ddb-sink.exp.kamelet.yaml -n $YAKS_NAMESPACE
diff --git a/experimental/test/aws-s3/amazonS3Client.groovy b/experimental/test/aws-s3/amazonS3Client.groovy
new file mode 100644
index 00000000..5c3ff8a0
--- /dev/null
+++ b/experimental/test/aws-s3/amazonS3Client.groovy
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider
+import software.amazon.awssdk.regions.Region
+import software.amazon.awssdk.services.s3.S3Client
+
+S3Client s3 = S3Client
+        .builder()
+        .endpointOverride(URI.create("${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL}"))
+        .credentialsProvider(StaticCredentialsProvider.create(
+                AwsBasicCredentials.create(
+                        "${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}",
+                        "${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}")
+        ))
+        .region(Region.of("${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}"))
+        .build()
+
+s3.createBucket(b -> b.bucket("${aws.s3.bucketNameOrArn}"))
+
+return s3
diff --git a/test/aws-s3/aws-s3-cloudevents.feature b/experimental/test/aws-s3/aws-s3-cloudevents.feature
similarity index 96%
rename from test/aws-s3/aws-s3-cloudevents.feature
rename to experimental/test/aws-s3/aws-s3-cloudevents.feature
index 5774b738..6f5513fc 100644
--- a/test/aws-s3/aws-s3-cloudevents.feature
+++ b/experimental/test/aws-s3/aws-s3-cloudevents.feature
@@ -1,7 +1,9 @@
 @knative
+@experimental
 Feature: AWS S3 Kamelet - cloud events data type
 
   Background:
+    Given Kamelet aws-s3-source-experimental is available
     Given Knative event consumer timeout is 20000 ms
     Given Camel K resource polling configuration
       | maxAttempts          | 200   |
diff --git a/test/aws-s3/aws-s3-knative.feature b/experimental/test/aws-s3/aws-s3-knative.feature
similarity index 96%
rename from test/aws-s3/aws-s3-knative.feature
rename to experimental/test/aws-s3/aws-s3-knative.feature
index dc358797..8a6512a9 100644
--- a/test/aws-s3/aws-s3-knative.feature
+++ b/experimental/test/aws-s3/aws-s3-knative.feature
@@ -1,7 +1,9 @@
 @knative
+@experimental
 Feature: AWS S3 Kamelet - Knative binding
 
   Background:
+    Given Kamelet aws-s3-source-experimental is available
     Given Knative event consumer timeout is 20000 ms
     Given Camel K resource polling configuration
       | maxAttempts          | 200   |
diff --git a/test/aws-s3/aws-s3-to-knative.yaml b/experimental/test/aws-s3/aws-s3-to-knative.yaml
similarity index 97%
rename from test/aws-s3/aws-s3-to-knative.yaml
rename to experimental/test/aws-s3/aws-s3-to-knative.yaml
index e99ee20f..afa1b572 100644
--- a/test/aws-s3/aws-s3-to-knative.yaml
+++ b/experimental/test/aws-s3/aws-s3-to-knative.yaml
@@ -24,7 +24,7 @@ spec:
     ref:
       kind: Kamelet
       apiVersion: camel.apache.org/v1alpha1
-      name: aws-s3-source
+      name: aws-s3-source-experimental
     properties:
       bucketNameOrArn: ${aws.s3.bucketNameOrArn}
       overrideEndpoint: true
diff --git a/test/aws-s3/yaks-config.yaml b/experimental/test/aws-s3/yaks-config.yaml
similarity index 90%
copy from test/aws-s3/yaks-config.yaml
copy to experimental/test/aws-s3/yaks-config.yaml
index 6f1a0d0d..6431eaf8 100644
--- a/test/aws-s3/yaks-config.yaml
+++ b/experimental/test/aws-s3/yaks-config.yaml
@@ -38,13 +38,7 @@ config:
         value: camel
     resources:
       - amazonS3Client.groovy
-      - aws-s3-credentials.properties
-      - aws-s3-to-log-uri-based.groovy
-      - aws-s3-to-log-secret-based.groovy
-      - aws-s3-uri-binding.yaml
       - aws-s3-to-knative.yaml
-      - aws-s3-to-knative-channel.yaml
-      - ../utils/knative-channel-to-log.yaml
     cucumber:
       tags:
         - "not @ignored"
@@ -69,3 +63,7 @@ config:
     failedOnly: true
     includes:
       - app=camel-k
+pre:
+  - name: Install experimental Kamelets
+    run: |
+      kubectl apply -f ../../aws-s3-source.exp.kamelet.yaml -n $YAKS_NAMESPACE
diff --git a/test/aws-s3/yaks-config.yaml b/test/aws-s3/yaks-config.yaml
index 6f1a0d0d..a2831684 100644
--- a/test/aws-s3/yaks-config.yaml
+++ b/test/aws-s3/yaks-config.yaml
@@ -42,7 +42,6 @@ config:
       - aws-s3-to-log-uri-based.groovy
       - aws-s3-to-log-secret-based.groovy
       - aws-s3-uri-binding.yaml
-      - aws-s3-to-knative.yaml
       - aws-s3-to-knative-channel.yaml
       - ../utils/knative-channel-to-log.yaml
     cucumber:


[camel-kamelets] 20/28: Move AWS S3 binary output type to generic level

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit 606adff22fe9165d6372d35440640b594e63f3c7
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Tue Nov 29 09:53:51 2022 +0100

    Move AWS S3 binary output type to generic level
---
 .../utils/format/DefaultDataTypeConverter.java     | 10 +++-
 .../utils/format/DefaultDataTypeRegistry.java      | 15 ++++--
 .../converter/aws2/s3/AWS2S3BinaryOutputType.java  | 55 ----------------------
 .../format/converter/standard/BinaryDataType.java  | 38 +++++++++++++++
 .../format/converter/standard/StringDataType.java  | 38 +++++++++++++++
 .../converter/{aws2-s3-binary => camel-binary}     |  2 +-
 .../converter/{aws2-s3-binary => camel-jsonObject} |  2 +-
 .../converter/{aws2-s3-binary => camel-string}     |  2 +-
 .../utils/format/DefaultDataTypeRegistryTest.java  |  8 ++--
 .../BinaryDataTypeTest.java}                       | 41 +++++++++-------
 .../StringDataTypeTest.java}                       | 49 +++++++++----------
 11 files changed, 150 insertions(+), 110 deletions(-)

diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java
index 9f2c31ce..b639ceae 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java
@@ -17,9 +17,12 @@
 
 package org.apache.camel.kamelets.utils.format;
 
+import org.apache.camel.CamelExecutionException;
 import org.apache.camel.Exchange;
+import org.apache.camel.InvalidPayloadException;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
 import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
+import org.apache.camel.util.ObjectHelper;
 
 /**
  * Default data type converter receives a name and a target type in order to use traditional exchange body conversion
@@ -53,7 +56,12 @@ public class DefaultDataTypeConverter implements DataTypeConverter {
             return;
         }
 
-        exchange.getMessage().setBody(exchange.getMessage().getBody(type));
+        try {
+            exchange.getMessage().setBody(exchange.getMessage().getMandatoryBody(type));
+        } catch (InvalidPayloadException e) {
+            throw new CamelExecutionException(String.format("Failed to convert exchange body to '%s' content using type %s",
+                    name, ObjectHelper.name(type)), exchange, e);
+        }
     }
 
     @Override
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
index 3d5b514e..1e530468 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
@@ -26,6 +26,9 @@ import java.util.Optional;
 import org.apache.camel.CamelContext;
 import org.apache.camel.CamelContextAware;
 import org.apache.camel.RuntimeCamelException;
+import org.apache.camel.kamelets.utils.format.converter.standard.BinaryDataType;
+import org.apache.camel.kamelets.utils.format.converter.standard.JsonModelDataType;
+import org.apache.camel.kamelets.utils.format.converter.standard.StringDataType;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverterResolver;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeLoader;
@@ -55,6 +58,7 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
     private DataTypeConverterResolver dataTypeConverterResolver;
 
     private boolean classpathScan = true;
+    private boolean useDefaultConverters = true;
 
     private final Map<String, List<DataTypeConverter>> dataTypeConverters = new HashMap<>();
 
@@ -100,11 +104,12 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
 
         if (classpathScan) {
             dataTypeLoaders.add(new AnnotationDataTypeLoader());
+        } else if (useDefaultConverters) {
+            addDataTypeConverter(new BinaryDataType());
+            addDataTypeConverter(new StringDataType());
+            addDataTypeConverter(new JsonModelDataType());
         }
 
-        addDataTypeConverter(new DefaultDataTypeConverter(DataType.DEFAULT_SCHEME, "string", "text/plain", String.class));
-        addDataTypeConverter(new DefaultDataTypeConverter(DataType.DEFAULT_SCHEME, "binary", "application/octet-stream", byte[].class));
-
         for (DataTypeLoader loader : dataTypeLoaders) {
             CamelContextAware.trySetCamelContext(loader, getCamelContext());
             loader.load(this);
@@ -180,6 +185,10 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
         this.classpathScan = classpathScan;
     }
 
+    public void setUseDefaultConverters(boolean useDefaultConverters) {
+        this.useDefaultConverters = useDefaultConverters;
+    }
+
     @Override
     public CamelContext getCamelContext() {
         return camelContext;
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java
deleted file mode 100644
index 5f1fa0b8..00000000
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.camel.kamelets.utils.format.converter.aws2.s3;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-import org.apache.camel.CamelExecutionException;
-import org.apache.camel.Exchange;
-import org.apache.camel.InvalidPayloadException;
-import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
-import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
-import software.amazon.awssdk.utils.IoUtils;
-
-/**
- * Binary output type.
- */
-@DataType(scheme = "aws2-s3", name = "binary", mediaType = "application/octet-stream")
-public class AWS2S3BinaryOutputType implements DataTypeConverter {
-
-    @Override
-    public void convert(Exchange exchange) {
-        if (exchange.getMessage().getBody() instanceof byte[]) {
-            return;
-        }
-
-        try {
-            InputStream is = exchange.getMessage().getBody(InputStream.class);
-            if (is != null) {
-                exchange.getMessage().setBody(IoUtils.toByteArray(is));
-                return;
-            }
-
-            // Use default Camel converter utils to convert body to byte[]
-            exchange.getMessage().setBody(exchange.getMessage().getMandatoryBody(byte[].class));
-        } catch (IOException | InvalidPayloadException e) {
-            throw new CamelExecutionException("Failed to convert AWS S3 body to byte[]", exchange, e);
-        }
-    }
-}
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/BinaryDataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/BinaryDataType.java
new file mode 100644
index 00000000..532e998b
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/BinaryDataType.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format.converter.standard;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.kamelets.utils.format.DefaultDataTypeConverter;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
+
+/**
+ * Binary data type.
+ */
+@DataType(name = "binary", mediaType = "application/octet-stream")
+public class BinaryDataType implements DataTypeConverter {
+
+    private static final DataTypeConverter DELEGATE =
+            new DefaultDataTypeConverter(DataType.DEFAULT_SCHEME, "binary", "application/octet-stream", byte[].class);
+
+    @Override
+    public void convert(Exchange exchange) {
+        DELEGATE.convert(exchange);
+    }
+}
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/StringDataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/StringDataType.java
new file mode 100644
index 00000000..d60b2aaa
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/StringDataType.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format.converter.standard;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.kamelets.utils.format.DefaultDataTypeConverter;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
+
+/**
+ * String data type.
+ */
+@DataType(name = "string", mediaType = "text/plain")
+public class StringDataType implements DataTypeConverter {
+
+    private static final DataTypeConverter DELEGATE =
+            new DefaultDataTypeConverter(DataType.DEFAULT_SCHEME, "string", "text/plain", String.class);
+
+    @Override
+    public void convert(Exchange exchange) {
+        DELEGATE.convert(exchange);
+    }
+}
diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-binary
similarity index 90%
copy from library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary
copy to library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-binary
index ba9c13f3..edf9a4ca 100644
--- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary
+++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-binary
@@ -15,4 +15,4 @@
 # limitations under the License.
 #
 
-class=org.apache.camel.kamelets.utils.format.converter.aws2.s3.AWS2S3BinaryOutputType
\ No newline at end of file
+class=org.apache.camel.kamelets.utils.format.converter.standard.BinaryDataType
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-jsonObject
similarity index 90%
copy from library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary
copy to library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-jsonObject
index ba9c13f3..2f725f6a 100644
--- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary
+++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-jsonObject
@@ -15,4 +15,4 @@
 # limitations under the License.
 #
 
-class=org.apache.camel.kamelets.utils.format.converter.aws2.s3.AWS2S3BinaryOutputType
\ No newline at end of file
+class=org.apache.camel.kamelets.utils.format.converter.standard.JsonModelDataType
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-string
similarity index 90%
rename from library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary
rename to library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-string
index ba9c13f3..8ef25725 100644
--- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary
+++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-string
@@ -15,4 +15,4 @@
 # limitations under the License.
 #
 
-class=org.apache.camel.kamelets.utils.format.converter.aws2.s3.AWS2S3BinaryOutputType
\ No newline at end of file
+class=org.apache.camel.kamelets.utils.format.converter.standard.StringDataType
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java
index c72e7897..d83c474b 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java
@@ -21,7 +21,9 @@ import java.util.Optional;
 
 import org.apache.camel.CamelContextAware;
 import org.apache.camel.impl.DefaultCamelContext;
+import org.apache.camel.kamelets.utils.format.converter.standard.BinaryDataType;
 import org.apache.camel.kamelets.utils.format.converter.standard.JsonModelDataType;
+import org.apache.camel.kamelets.utils.format.converter.standard.StringDataType;
 import org.apache.camel.kamelets.utils.format.converter.test.UppercaseDataType;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
 import org.junit.jupiter.api.Assertions;
@@ -44,12 +46,10 @@ class DefaultDataTypeRegistryTest {
         Assertions.assertEquals(JsonModelDataType.class, converter.get().getClass());
         converter = dataTypeRegistry.lookup( "string");
         Assertions.assertTrue(converter.isPresent());
-        Assertions.assertEquals(DefaultDataTypeConverter.class, converter.get().getClass());
-        Assertions.assertEquals(String.class, ((DefaultDataTypeConverter) converter.get()).getType());
+        Assertions.assertEquals(StringDataType.class, converter.get().getClass());
         converter = dataTypeRegistry.lookup( "binary");
         Assertions.assertTrue(converter.isPresent());
-        Assertions.assertEquals(DefaultDataTypeConverter.class, converter.get().getClass());
-        Assertions.assertEquals(byte[].class, ((DefaultDataTypeConverter) converter.get()).getType());
+        Assertions.assertEquals(BinaryDataType.class, converter.get().getClass());
         converter = dataTypeRegistry.lookup( "lowercase");
         Assertions.assertTrue(converter.isPresent());
         converter = dataTypeRegistry.lookup( "uppercase");
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/BinaryDataTypeTest.java
similarity index 72%
copy from library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputTypeTest.java
copy to library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/BinaryDataTypeTest.java
index 26b359f4..d2dd616a 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputTypeTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/BinaryDataTypeTest.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.camel.kamelets.utils.format.converter.aws2.s3;
+package org.apache.camel.kamelets.utils.format.converter.standard;
 
 import java.io.ByteArrayInputStream;
 import java.nio.charset.StandardCharsets;
@@ -22,32 +22,40 @@ import java.util.Optional;
 
 import org.apache.camel.CamelContextAware;
 import org.apache.camel.Exchange;
-import org.apache.camel.component.aws2.s3.AWS2S3Constants;
 import org.apache.camel.impl.DefaultCamelContext;
 import org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
 import org.apache.camel.support.DefaultExchange;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
-import software.amazon.awssdk.core.ResponseInputStream;
-import software.amazon.awssdk.http.AbortableInputStream;
-import software.amazon.awssdk.services.s3.model.GetObjectRequest;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
-public class AWS2S3BinaryOutputTypeTest {
+public class BinaryDataTypeTest {
 
     private final DefaultCamelContext camelContext = new DefaultCamelContext();
 
-    private final AWS2S3BinaryOutputType outputType = new AWS2S3BinaryOutputType();
+    private final BinaryDataType dataType = new BinaryDataType();
+
+    @Test
+    void shouldRetainBytesModel() throws Exception {
+        Exchange exchange = new DefaultExchange(camelContext);
+
+        exchange.getMessage().setHeader("file", "test.txt");
+        exchange.getMessage().setBody("Test".getBytes(StandardCharsets.UTF_8));
+        dataType.convert(exchange);
+
+        Assertions.assertTrue(exchange.getMessage().hasHeaders());
+        assertBinaryBody(exchange, "test.txt", "Test");
+    }
 
     @Test
     void shouldMapFromStringToBytesModel() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);
 
-        exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test1.txt");
+        exchange.getMessage().setHeader("file", "test1.txt");
         exchange.getMessage().setBody("Test1");
-        outputType.convert(exchange);
+        dataType.convert(exchange);
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
         assertBinaryBody(exchange, "test1.txt", "Test1");
@@ -57,9 +65,9 @@ public class AWS2S3BinaryOutputTypeTest {
     void shouldMapFromBytesToBytesModel() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);
 
-        exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test2.txt");
+        exchange.getMessage().setHeader("file", "test2.txt");
         exchange.getMessage().setBody("Test2".getBytes(StandardCharsets.UTF_8));
-        outputType.convert(exchange);
+        dataType.convert(exchange);
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
         assertBinaryBody(exchange, "test2.txt", "Test2");
@@ -69,10 +77,9 @@ public class AWS2S3BinaryOutputTypeTest {
     void shouldMapFromInputStreamToBytesModel() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);
 
-        exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test3.txt");
-        exchange.getMessage().setBody(new ResponseInputStream<>(GetObjectRequest.builder().bucket("myBucket").key("test3.txt").build(),
-                AbortableInputStream.create(new ByteArrayInputStream("Test3".getBytes(StandardCharsets.UTF_8)))));
-        outputType.convert(exchange);
+        exchange.getMessage().setHeader("file", "test3.txt");
+        exchange.getMessage().setBody(new ByteArrayInputStream("Test3".getBytes(StandardCharsets.UTF_8)));
+        dataType.convert(exchange);
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
         assertBinaryBody(exchange, "test3.txt", "Test3");
@@ -82,12 +89,12 @@ public class AWS2S3BinaryOutputTypeTest {
     public void shouldLookupDataType() throws Exception {
         DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry();
         CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext);
-        Optional<DataTypeConverter> converter = dataTypeRegistry.lookup("aws2-s3", "binary");
+        Optional<DataTypeConverter> converter = dataTypeRegistry.lookup( "binary");
         Assertions.assertTrue(converter.isPresent());
     }
 
     private static void assertBinaryBody(Exchange exchange, String key, String content) {
-        assertEquals(key, exchange.getMessage().getHeader(AWS2S3Constants.KEY));
+        assertEquals(key, exchange.getMessage().getHeader("file"));
 
         assertEquals(byte[].class, exchange.getMessage().getBody().getClass());
         assertEquals(content, exchange.getMessage().getBody(String.class));
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/StringDataTypeTest.java
similarity index 58%
rename from library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputTypeTest.java
rename to library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/StringDataTypeTest.java
index 26b359f4..8ee19cba 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputTypeTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/StringDataTypeTest.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.camel.kamelets.utils.format.converter.aws2.s3;
+package org.apache.camel.kamelets.utils.format.converter.standard;
 
 import java.io.ByteArrayInputStream;
 import java.nio.charset.StandardCharsets;
@@ -22,74 +22,69 @@ import java.util.Optional;
 
 import org.apache.camel.CamelContextAware;
 import org.apache.camel.Exchange;
-import org.apache.camel.component.aws2.s3.AWS2S3Constants;
 import org.apache.camel.impl.DefaultCamelContext;
 import org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
 import org.apache.camel.support.DefaultExchange;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
-import software.amazon.awssdk.core.ResponseInputStream;
-import software.amazon.awssdk.http.AbortableInputStream;
-import software.amazon.awssdk.services.s3.model.GetObjectRequest;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
-public class AWS2S3BinaryOutputTypeTest {
+public class StringDataTypeTest {
 
     private final DefaultCamelContext camelContext = new DefaultCamelContext();
 
-    private final AWS2S3BinaryOutputType outputType = new AWS2S3BinaryOutputType();
+    private final StringDataType dataType = new StringDataType();
 
     @Test
-    void shouldMapFromStringToBytesModel() throws Exception {
+    void shouldRetainStringModel() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);
 
-        exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test1.txt");
-        exchange.getMessage().setBody("Test1");
-        outputType.convert(exchange);
+        exchange.getMessage().setHeader("file", "test.txt");
+        exchange.getMessage().setBody("Test");
+        dataType.convert(exchange);
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
-        assertBinaryBody(exchange, "test1.txt", "Test1");
+        assertStringBody(exchange, "test.txt", "Test");
     }
 
     @Test
-    void shouldMapFromBytesToBytesModel() throws Exception {
+    void shouldMapFromBinaryToStringModel() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);
 
-        exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test2.txt");
-        exchange.getMessage().setBody("Test2".getBytes(StandardCharsets.UTF_8));
-        outputType.convert(exchange);
+        exchange.getMessage().setHeader("file", "test1.txt");
+        exchange.getMessage().setBody("Test1".getBytes(StandardCharsets.UTF_8));
+        dataType.convert(exchange);
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
-        assertBinaryBody(exchange, "test2.txt", "Test2");
+        assertStringBody(exchange, "test1.txt", "Test1");
     }
 
     @Test
-    void shouldMapFromInputStreamToBytesModel() throws Exception {
+    void shouldMapFromInputStreamToStringModel() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);
 
-        exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test3.txt");
-        exchange.getMessage().setBody(new ResponseInputStream<>(GetObjectRequest.builder().bucket("myBucket").key("test3.txt").build(),
-                AbortableInputStream.create(new ByteArrayInputStream("Test3".getBytes(StandardCharsets.UTF_8)))));
-        outputType.convert(exchange);
+        exchange.getMessage().setHeader("file", "test3.txt");
+        exchange.getMessage().setBody(new ByteArrayInputStream("Test3".getBytes(StandardCharsets.UTF_8)));
+        dataType.convert(exchange);
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
-        assertBinaryBody(exchange, "test3.txt", "Test3");
+        assertStringBody(exchange, "test3.txt", "Test3");
     }
 
     @Test
     public void shouldLookupDataType() throws Exception {
         DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry();
         CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext);
-        Optional<DataTypeConverter> converter = dataTypeRegistry.lookup("aws2-s3", "binary");
+        Optional<DataTypeConverter> converter = dataTypeRegistry.lookup( "string");
         Assertions.assertTrue(converter.isPresent());
     }
 
-    private static void assertBinaryBody(Exchange exchange, String key, String content) {
-        assertEquals(key, exchange.getMessage().getHeader(AWS2S3Constants.KEY));
+    private static void assertStringBody(Exchange exchange, String key, String content) {
+        assertEquals(key, exchange.getMessage().getHeader("file"));
 
-        assertEquals(byte[].class, exchange.getMessage().getBody().getClass());
+        assertEquals(String.class, exchange.getMessage().getBody().getClass());
         assertEquals(content, exchange.getMessage().getBody(String.class));
     }
 }


[camel-kamelets] 10/28: Remove AWS S3 Json output type

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit 30c5571f822c4832514f87ca5211d449719d9d85
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Fri Nov 18 19:44:05 2022 +0100

    Remove AWS S3 Json output type
    
    Not a robust solution at the moment
---
 .../converter/aws2/s3/AWS2S3JsonOutputType.java    | 63 ----------------------
 .../apache/camel/datatype/converter/aws2-s3-json   | 18 -------
 ...peTest.java => AWS2S3BinaryOutputTypeTest.java} | 31 +++++------
 3 files changed, 14 insertions(+), 98 deletions(-)

diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputType.java
deleted file mode 100644
index 74736d67..00000000
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputType.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.camel.kamelets.utils.format.converter.aws2.s3;
-
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-
-import org.apache.camel.CamelExecutionException;
-import org.apache.camel.Exchange;
-import org.apache.camel.component.aws2.s3.AWS2S3Constants;
-import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
-import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
-import software.amazon.awssdk.core.ResponseInputStream;
-import software.amazon.awssdk.utils.IoUtils;
-
-/**
- * Json output data type represents file name as key and file content as Json structure.
- * <p/>
- * Example Json structure: { "key": "myFile.txt", "content": "Hello", }
- */
-@DataType(scheme = "aws2-s3", name = "json")
-public class AWS2S3JsonOutputType implements DataTypeConverter {
-
-    private static final String TEMPLATE = "{" +
-            "\"key\": \"%s\", " +
-            "\"content\": \"%s\"" +
-            "}";
-
-    @Override
-    public void convert(Exchange exchange) {
-        String key = exchange.getMessage().getHeader(AWS2S3Constants.KEY, String.class);
-
-        ResponseInputStream<?> bodyInputStream = exchange.getMessage().getBody(ResponseInputStream.class);
-        if (bodyInputStream != null) {
-            try {
-                exchange.getMessage().setBody(String.format(TEMPLATE, key, IoUtils.toUtf8String(bodyInputStream)));
-                return;
-            } catch (IOException e) {
-                throw new CamelExecutionException("Failed to convert AWS S3 body to Json", exchange, e);
-            }
-        }
-
-        byte[] bodyContent = exchange.getMessage().getBody(byte[].class);
-        if (bodyContent != null) {
-            exchange.getMessage().setBody(String.format(TEMPLATE, key, new String(bodyContent, StandardCharsets.UTF_8)));
-        }
-    }
-}
diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-json b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-json
deleted file mode 100644
index 7a7c544f..00000000
--- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-json
+++ /dev/null
@@ -1,18 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-class=org.apache.camel.kamelets.utils.format.converter.aws2.s3.AWS2S3JsonOutputType
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputTypeTest.java
similarity index 76%
rename from library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputTypeTest.java
rename to library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputTypeTest.java
index 53357add..26b359f4 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputTypeTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputTypeTest.java
@@ -35,14 +35,14 @@ import software.amazon.awssdk.services.s3.model.GetObjectRequest;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
-public class AWS2S3JsonOutputTypeTest {
+public class AWS2S3BinaryOutputTypeTest {
 
     private final DefaultCamelContext camelContext = new DefaultCamelContext();
 
-    private final AWS2S3JsonOutputType outputType = new AWS2S3JsonOutputType();
+    private final AWS2S3BinaryOutputType outputType = new AWS2S3BinaryOutputType();
 
     @Test
-    void shouldMapFromStringToJsonModel() throws Exception {
+    void shouldMapFromStringToBytesModel() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);
 
         exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test1.txt");
@@ -50,13 +50,11 @@ public class AWS2S3JsonOutputTypeTest {
         outputType.convert(exchange);
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
-        assertEquals("test1.txt", exchange.getMessage().getHeader(AWS2S3Constants.KEY));
-
-        assertJsonModelBody(exchange, "test1.txt", "Test1");
+        assertBinaryBody(exchange, "test1.txt", "Test1");
     }
 
     @Test
-    void shouldMapFromBytesToJsonModel() throws Exception {
+    void shouldMapFromBytesToBytesModel() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);
 
         exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test2.txt");
@@ -64,13 +62,11 @@ public class AWS2S3JsonOutputTypeTest {
         outputType.convert(exchange);
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
-        assertEquals("test2.txt", exchange.getMessage().getHeader(AWS2S3Constants.KEY));
-
-        assertJsonModelBody(exchange, "test2.txt", "Test2");
+        assertBinaryBody(exchange, "test2.txt", "Test2");
     }
 
     @Test
-    void shouldMapFromInputStreamToJsonModel() throws Exception {
+    void shouldMapFromInputStreamToBytesModel() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);
 
         exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test3.txt");
@@ -79,20 +75,21 @@ public class AWS2S3JsonOutputTypeTest {
         outputType.convert(exchange);
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
-        assertEquals("test3.txt", exchange.getMessage().getHeader(AWS2S3Constants.KEY));
-
-        assertJsonModelBody(exchange, "test3.txt", "Test3");
+        assertBinaryBody(exchange, "test3.txt", "Test3");
     }
 
     @Test
     public void shouldLookupDataType() throws Exception {
         DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry();
         CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext);
-        Optional<DataTypeConverter> converter = dataTypeRegistry.lookup("aws2-s3", "json");
+        Optional<DataTypeConverter> converter = dataTypeRegistry.lookup("aws2-s3", "binary");
         Assertions.assertTrue(converter.isPresent());
     }
 
-    private static void assertJsonModelBody(Exchange exchange, String key, String content) {
-        assertEquals(String.format("{\"key\": \"%s\", \"content\": \"%s\"}", key, content), exchange.getMessage().getBody());
+    private static void assertBinaryBody(Exchange exchange, String key, String content) {
+        assertEquals(key, exchange.getMessage().getHeader(AWS2S3Constants.KEY));
+
+        assertEquals(byte[].class, exchange.getMessage().getBody().getClass());
+        assertEquals(content, exchange.getMessage().getBody(String.class));
     }
 }


[camel-kamelets] 06/28: Fail on missing data type and add log output

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit 814654a81cb645e5099ba55910476c3e9915274f
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Fri Nov 18 10:48:51 2022 +0100

    Fail on missing data type and add log output
---
 .../kamelets/utils/format/DataTypeProcessor.java   | 22 ++++++++++-
 .../format/DefaultDataTypeConverterResolver.java   |  5 +++
 .../utils/format/DefaultDataTypeRegistry.java      | 44 ++++++++++++++++++----
 .../utils/format/DataTypeProcessorTest.java        |  3 +-
 4 files changed, 65 insertions(+), 9 deletions(-)

diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java
index def0f2b8..110c5cd4 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java
@@ -21,9 +21,12 @@ import java.util.Optional;
 
 import org.apache.camel.CamelContext;
 import org.apache.camel.CamelContextAware;
+import org.apache.camel.CamelExecutionException;
 import org.apache.camel.Exchange;
 import org.apache.camel.Processor;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Processor applies data type conversion based on given format name. Searches for matching data type converter
@@ -33,6 +36,8 @@ public class DataTypeProcessor implements Processor, CamelContextAware {
 
     public static final String DATA_TYPE_FORMAT_PROPERTY = "CamelDataTypeFormat";
 
+    private static final Logger LOG = LoggerFactory.getLogger(DataTypeProcessor.class);
+
     private CamelContext camelContext;
 
     private DefaultDataTypeRegistry registry;
@@ -40,6 +45,8 @@ public class DataTypeProcessor implements Processor, CamelContextAware {
     private String scheme;
     private String format;
 
+    private boolean ignoreMissingDataType = false;
+
     private DataTypeConverter converter;
 
     @Override
@@ -52,7 +59,16 @@ public class DataTypeProcessor implements Processor, CamelContextAware {
             return;
         }
 
-        doConverterLookup().ifPresent(converter -> converter.convert(exchange));
+        Optional<DataTypeConverter> dataTypeConverter = doConverterLookup();
+        dataTypeConverter.ifPresent(converter -> converter.convert(exchange));
+
+        if (!dataTypeConverter.isPresent()) {
+            LOG.debug("Unable to find data type for scheme {} and format name {}", scheme, format);
+
+            if (!ignoreMissingDataType) {
+                throw new CamelExecutionException(String.format("Missing data type for scheme %s and format name %s", scheme, format), exchange);
+            }
+        }
     }
 
     private Optional<DataTypeConverter> doConverterLookup() {
@@ -78,6 +94,10 @@ public class DataTypeProcessor implements Processor, CamelContextAware {
         this.registry = dataTypeRegistry;
     }
 
+    public void setIgnoreMissingDataType(boolean ignoreMissingDataType) {
+        this.ignoreMissingDataType = ignoreMissingDataType;
+    }
+
     @Override
     public CamelContext getCamelContext() {
         return camelContext;
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java
index 85444a28..5fdaa790 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java
@@ -43,6 +43,11 @@ public class DefaultDataTypeConverterResolver implements DataTypeConverterResolv
     @Override
     public Optional<DataTypeConverter> resolve(String scheme, String name, CamelContext context) {
         String converterName = String.format("%s-%s", scheme, name);
+
+        if (getLog().isDebugEnabled()) {
+            getLog().debug("Resolving data type converter {} via: {}{}", converterName, RESOURCE_PATH, converterName);
+        }
+
         Class<?> type = findConverter(converterName, context);
         if (type == null) {
             // not found
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
index 7105fb4c..0e262d28 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
@@ -31,6 +31,9 @@ import org.apache.camel.kamelets.utils.format.spi.DataTypeConverterResolver;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeLoader;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeRegistry;
 import org.apache.camel.support.service.ServiceSupport;
+import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Default data type registry able to resolve data types converters in the project. Data types may be defined at the component level
@@ -41,6 +44,8 @@ import org.apache.camel.support.service.ServiceSupport;
  */
 public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeRegistry, CamelContextAware {
 
+    private static final Logger LOG = LoggerFactory.getLogger(DefaultDataTypeRegistry.class);
+
     private CamelContext camelContext;
 
     protected final List<DataTypeLoader> dataTypeLoaders = new ArrayList<>();
@@ -51,12 +56,21 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
 
     @Override
     public void addDataTypeConverter(String scheme, DataTypeConverter converter) {
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Adding data type for scheme {} and name {}", scheme, converter.getName());
+        }
+
         this.getComponentDataTypeConverters(scheme).add(converter);
     }
 
     @Override
     public Optional<DataTypeConverter> lookup(String scheme, String name) {
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("Searching for data type with scheme {} and name {}", scheme, name);
+        }
+
         if (dataTypeLoaders.isEmpty()) {
+            LOG.trace("Lazy initializing data type registry");
             try {
                 doInit();
             } catch (Exception e) {
@@ -89,6 +103,8 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
             CamelContextAware.trySetCamelContext(loader, getCamelContext());
             loader.load(this);
         }
+
+        LOG.debug("Loaded {} initial data type converters", dataTypeConverters.size());
     }
 
     @Override
@@ -115,17 +131,31 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
         Optional<DataTypeConverter> dataTypeConverter = Optional.ofNullable(camelContext.getRegistry()
                 .lookupByNameAndType(String.format("%s-%s", scheme, name), DataTypeConverter.class));
 
-        if (!dataTypeConverter.isPresent()) {
-            // Try to retrieve converter from preloaded converters in local cache
-            dataTypeConverter = getComponentDataTypeConverters(scheme).stream()
+        if (dataTypeConverter.isPresent()) {
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Found data type {} for scheme {} and name {} in Camel registry", ObjectHelper.name(dataTypeConverter.get().getClass()), scheme, name);
+            }
+            return dataTypeConverter;
+        }
+
+        // Try to retrieve converter from preloaded converters in local cache
+        dataTypeConverter = getComponentDataTypeConverters(scheme).stream()
                 .filter(dtc -> name.equals(dtc.getName()))
                 .findFirst();
+
+        if (dataTypeConverter.isPresent()) {
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Found data type {} for scheme {} and name {}", ObjectHelper.name(dataTypeConverter.get().getClass()), scheme, name);
+            }
+            return dataTypeConverter;
         }
 
-        if (!dataTypeConverter.isPresent()) {
-            // Try to lazy load converter via resource path lookup
-            dataTypeConverter = dataTypeConverterResolver.resolve(scheme, name, camelContext);
-            dataTypeConverter.ifPresent(converter -> getComponentDataTypeConverters(scheme).add(converter));
+        // Try to lazy load converter via resource path lookup
+        dataTypeConverter = dataTypeConverterResolver.resolve(scheme, name, camelContext);
+        dataTypeConverter.ifPresent(converter -> getComponentDataTypeConverters(scheme).add(converter));
+
+        if (LOG.isDebugEnabled() && dataTypeConverter.isPresent()) {
+            LOG.debug("Resolved data type {} for scheme {} and name {} via resource path", ObjectHelper.name(dataTypeConverter.get().getClass()), scheme, name);
         }
 
         return dataTypeConverter;
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DataTypeProcessorTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DataTypeProcessorTest.java
index 0140b6f9..d2c2554a 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DataTypeProcessorTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DataTypeProcessorTest.java
@@ -70,10 +70,11 @@ class DataTypeProcessorTest {
     }
 
     @Test
-    public void shouldHandleUnknownDataType() throws Exception {
+    public void shouldIgnoreUnknownDataType() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);
 
         exchange.getMessage().setBody(new ByteArrayInputStream("Test".getBytes(StandardCharsets.UTF_8)));
+        processor.setIgnoreMissingDataType(true);
         processor.setScheme("foo");
         processor.setFormat("unknown");
         processor.process(exchange);


[camel-kamelets] 21/28: Do cache ObjectMapper instance in JsonModelDatType converter

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit d161ccfc4aa8a55e5bdd4f2edc3439b2f51e48b4
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Tue Nov 29 10:14:20 2022 +0100

    Do cache ObjectMapper instance in JsonModelDatType converter
    
    Also use Camel ClassResolver API to resolve model class
---
 .../utils/format/DefaultDataTypeRegistry.java      |  7 +++++++
 .../converter/standard/JsonModelDataType.java      | 23 ++++++++++++++++++++--
 .../converter/standard/JsonModelDataTypeTest.java  |  6 ++++++
 3 files changed, 34 insertions(+), 2 deletions(-)

diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
index 1e530468..24c77b70 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
@@ -115,6 +115,13 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR
             loader.load(this);
         }
 
+        // if applicable set Camel context on all loaded data type converters
+        dataTypeConverters.values().forEach(converters -> converters.forEach(converter -> {
+            if (converter instanceof CamelContextAware && ((CamelContextAware) converter).getCamelContext() == null) {
+                CamelContextAware.trySetCamelContext(converter, camelContext);
+            }
+        }));
+
         LOG.debug("Loaded {} schemes holding {} data type converters", dataTypeConverters.size(), dataTypeConverters.values().stream().mapToInt(List::size).sum());
     }
 
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java
index 54c67785..0a80ee32 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java
@@ -21,12 +21,15 @@ import java.io.ByteArrayInputStream;
 import java.io.InputStream;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.camel.CamelContext;
+import org.apache.camel.CamelContextAware;
 import org.apache.camel.CamelExecutionException;
 import org.apache.camel.Exchange;
 import org.apache.camel.InvalidPayloadException;
 import org.apache.camel.component.jackson.JacksonDataFormat;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
 import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
+import org.apache.camel.util.ObjectHelper;
 
 /**
  * Data type converter able to unmarshal to given unmarshalType using jackson data format.
@@ -34,18 +37,24 @@ import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
  * Unmarshal type should be given as a fully qualified class name in the exchange properties.
  */
 @DataType(name = "jsonObject", mediaType = "application/json")
-public class JsonModelDataType implements DataTypeConverter {
+public class JsonModelDataType implements DataTypeConverter, CamelContextAware {
 
     public static final String DATA_TYPE_MODEL_PROPERTY = "CamelDataTypeModel";
 
+    private CamelContext camelContext;
+
+    private static final ObjectMapper mapper = new ObjectMapper();
+
     @Override
     public void convert(Exchange exchange) {
         if (!exchange.hasProperties() || !exchange.getProperties().containsKey(DATA_TYPE_MODEL_PROPERTY)) {
             return;
         }
 
+        ObjectHelper.notNull(camelContext, "camelContext");
+
         String type = exchange.getProperty(DATA_TYPE_MODEL_PROPERTY, String.class);
-        try (JacksonDataFormat dataFormat = new JacksonDataFormat(new ObjectMapper(), Class.forName(type))) {
+        try (JacksonDataFormat dataFormat = new JacksonDataFormat(mapper, camelContext.getClassResolver().resolveMandatoryClass(type))) {
             Object unmarshalled = dataFormat.unmarshal(exchange, getBodyAsStream(exchange));
             exchange.getMessage().setBody(unmarshalled);
         } catch (Exception e) {
@@ -63,4 +72,14 @@ public class JsonModelDataType implements DataTypeConverter {
 
         return bodyStream;
     }
+
+    @Override
+    public CamelContext getCamelContext() {
+        return camelContext;
+    }
+
+    @Override
+    public void setCamelContext(CamelContext camelContext) {
+        this.camelContext = camelContext;
+    }
 }
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java
index d93da234..cb253a16 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java
@@ -27,6 +27,7 @@ import org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry;
 import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
 import org.apache.camel.support.DefaultExchange;
 import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -37,6 +38,11 @@ public class JsonModelDataTypeTest {
 
     private final JsonModelDataType dataType = new JsonModelDataType();
 
+    @BeforeEach
+    public void setup() {
+        dataType.setCamelContext(camelContext);
+    }
+
     @Test
     void shouldMapFromStringToJsonModel() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);


[camel-kamelets] 26/28: Revert existing Kamelets to not use data type converter

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit a0406cc7a0587decd5eba2fe8f39a5ee5b0302e8
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Wed Nov 30 22:02:13 2022 +0100

    Revert existing Kamelets to not use data type converter
    
    - AWS S3 source Kamelet
    - AWS DDB sink Kamelet
    - JsonToDdbModelConverter utility and unit tests
---
 kamelets/aws-ddb-sink.kamelet.yaml                 |  25 +--
 kamelets/aws-s3-source.kamelet.yaml                |  19 --
 .../transform/aws/ddb/JsonToDdbModelConverter.java | 201 +++++++++++++++++++++
 .../aws/ddb/JsonToDdbModelConverterTest.java       | 184 +++++++++++++++++++
 .../resources/kamelets/aws-ddb-sink.kamelet.yaml   |  25 +--
 .../resources/kamelets/aws-s3-source.kamelet.yaml  |  19 --
 6 files changed, 395 insertions(+), 78 deletions(-)

diff --git a/kamelets/aws-ddb-sink.kamelet.yaml b/kamelets/aws-ddb-sink.kamelet.yaml
index 87b338ee..5b603abf 100644
--- a/kamelets/aws-ddb-sink.kamelet.yaml
+++ b/kamelets/aws-ddb-sink.kamelet.yaml
@@ -97,12 +97,6 @@ spec:
         x-descriptors:
           - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
         default: false
-      inputFormat:
-        title: Input Type
-        description: Specify the input type for this Kamelet. The Kamelet will automatically apply conversion logic in order to transform message content to this data type.
-        type: string
-        default: json
-        example: json
   types:
     in:
       mediaType: application/json
@@ -113,26 +107,17 @@ spec:
   - "camel:aws2-ddb"
   - "camel:kamelet"
   template:
-    beans:
-    - name: dataTypeRegistry
-      type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry"
-    - name: inputTypeProcessor
-      type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor"
-      property:
-        - key: scheme
-          value: 'aws2-ddb'
-        - key: format
-          value: '{{inputFormat}}'
-        - key: registry
-          value: '#bean:{{dataTypeRegistry}}'
     from:
       uri: "kamelet:source"
       steps:
       - set-property:
           name: operation
           constant: "{{operation}}"
-      - process:
-          ref: "{{inputTypeProcessor}}"
+      - unmarshal:
+          json:
+            library: Jackson
+            unmarshalType: com.fasterxml.jackson.databind.JsonNode
+      - bean: "org.apache.camel.kamelets.utils.transform.aws.ddb.JsonToDdbModelConverter"
       - to:
           uri: "aws2-ddb:{{table}}"
           parameters:
diff --git a/kamelets/aws-s3-source.kamelet.yaml b/kamelets/aws-s3-source.kamelet.yaml
index d937f6e5..6ab2bca4 100644
--- a/kamelets/aws-s3-source.kamelet.yaml
+++ b/kamelets/aws-s3-source.kamelet.yaml
@@ -107,12 +107,6 @@ spec:
         description: The number of milliseconds before the next poll of the selected bucket.
         type: integer
         default: 500
-      outputFormat:
-        title: Output Type
-        description: Choose the output type for this Kamelet. The Kamelet supports different output types and performs automatic message conversion according to this data type.
-        type: string
-        default: binary
-        example: binary
   dependencies:
     - "camel:core"
     - "camel:aws2-s3"
@@ -120,17 +114,6 @@ spec:
     - "camel:kamelet"
   template:
     beans:
-      - name: dataTypeRegistry
-        type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry"
-      - name: outputTypeProcessor
-        type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor"
-        property:
-          - key: scheme
-            value: 'aws2-s3'
-          - key: format
-            value: '{{outputFormat}}'
-          - key: registry
-            value: '#bean:{{dataTypeRegistry}}'
       - name: renameHeaders
         type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders"
         property:
@@ -160,6 +143,4 @@ spec:
       steps:
       - process:
           ref: "{{renameHeaders}}"
-      - process:
-          ref: "{{outputTypeProcessor}}"
       - to: "kamelet:sink"
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverter.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverter.java
new file mode 100644
index 00000000..2a203ed0
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverter.java
@@ -0,0 +1,201 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.kamelets.utils.transform.aws.ddb;
+
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.camel.Exchange;
+import org.apache.camel.ExchangeProperty;
+import org.apache.camel.InvalidPayloadException;
+import org.apache.camel.component.aws2.ddb.Ddb2Constants;
+import org.apache.camel.component.aws2.ddb.Ddb2Operations;
+import software.amazon.awssdk.services.dynamodb.model.AttributeAction;
+import software.amazon.awssdk.services.dynamodb.model.AttributeValue;
+import software.amazon.awssdk.services.dynamodb.model.AttributeValueUpdate;
+import software.amazon.awssdk.services.dynamodb.model.ReturnValue;
+
+/**
+ * Maps Json body to DynamoDB attribute value map and sets the attribute map as Camel DynamoDB header entries.
+ *
+ * Json property names map to attribute keys and Json property values map to attribute values.
+ *
+ * During mapping the Json property types resolve to the respective attribute types ({@code String, StringSet, Boolean, Number, NumberSet, Map, Null}).
+ * Primitive typed arrays in Json get mapped to {@code StringSet} or {@code NumberSet} attribute values.
+ *
+ * For PutItem operation the Json body defines all item attributes.
+ *
+ * For DeleteItem operation the Json body defines only the primary key attributes that identify the item to delete.
+ *
+ * For UpdateItem operation the Json body defines both key attributes to identify the item to be updated and all item attributes that get updated on the item.
+ *
+ * The given Json body can use "key" and "item" as top level properties.
+ * Both define a Json object that will be mapped to respective attribute value maps:
+ * <pre>{@code
+ * {
+ *   "key": {},
+ *   "item": {}
+ * }
+ * }
+ * </pre>
+ * The converter will extract the objects and set respective attribute value maps as header entries.
+ * This is a comfortable way to define different key and item attribute value maps e.g. on UpdateItem operation.
+ *
+ * In case key and item attribute value maps are identical you can omit the special top level properties completely.
+ * The converter will map the whole Json body as is then and use it as source for the attribute value map.
+ */
+public class JsonToDdbModelConverter {
+
+    public String process(@ExchangeProperty("operation") String operation, Exchange exchange) throws InvalidPayloadException {
+        if (exchange.getMessage().getHeaders().containsKey(Ddb2Constants.ITEM) ||
+                exchange.getMessage().getHeaders().containsKey(Ddb2Constants.KEY)) {
+            return "";
+        }
+
+        ObjectMapper mapper = new ObjectMapper();
+
+        JsonNode jsonBody = exchange.getMessage().getMandatoryBody(JsonNode.class);
+
+        JsonNode key = jsonBody.get("key");
+        JsonNode item = jsonBody.get("item");
+
+        Map<String, Object> keyProps;
+        if (key != null) {
+            keyProps = mapper.convertValue(key, new TypeReference<Map<String, Object>>(){});
+        } else {
+            keyProps = mapper.convertValue(jsonBody, new TypeReference<Map<String, Object>>(){});
+        }
+
+        Map<String, Object> itemProps;
+        if (item != null) {
+            itemProps = mapper.convertValue(item, new TypeReference<Map<String, Object>>(){});
+        } else {
+            itemProps = keyProps;
+        }
+
+        final Map<String, AttributeValue> keyMap = getAttributeValueMap(keyProps);
+
+        switch (Ddb2Operations.valueOf(operation)) {
+            case PutItem:
+                exchange.getMessage().setHeader(Ddb2Constants.OPERATION, Ddb2Operations.PutItem);
+                exchange.getMessage().setHeader(Ddb2Constants.ITEM, getAttributeValueMap(itemProps));
+                setHeaderIfNotPresent(Ddb2Constants.RETURN_VALUES, ReturnValue.ALL_OLD.toString(), exchange);
+                break;
+            case UpdateItem:
+                exchange.getMessage().setHeader(Ddb2Constants.OPERATION, Ddb2Operations.UpdateItem);
+                exchange.getMessage().setHeader(Ddb2Constants.KEY, keyMap);
+                exchange.getMessage().setHeader(Ddb2Constants.UPDATE_VALUES, getAttributeValueUpdateMap(itemProps));
+                setHeaderIfNotPresent(Ddb2Constants.RETURN_VALUES, ReturnValue.ALL_NEW.toString(), exchange);
+                break;
+            case DeleteItem:
+                exchange.getMessage().setHeader(Ddb2Constants.OPERATION, Ddb2Operations.DeleteItem);
+                exchange.getMessage().setHeader(Ddb2Constants.KEY, keyMap);
+                setHeaderIfNotPresent(Ddb2Constants.RETURN_VALUES, ReturnValue.ALL_OLD.toString(), exchange);
+                break;
+            default:
+                throw new UnsupportedOperationException(String.format("Unsupported operation '%s'", operation));
+        }
+
+        return "";
+    }
+
+    private void setHeaderIfNotPresent(String headerName, Object value, Exchange exchange) {
+        exchange.getMessage().setHeader(headerName, value);
+    }
+
+    private Map<String, AttributeValue> getAttributeValueMap(Map<String, Object> body) {
+        final Map<String, AttributeValue> attributeValueMap = new LinkedHashMap<>();
+
+        for (Map.Entry<String, Object> attribute : body.entrySet()) {
+            attributeValueMap.put(attribute.getKey(), getAttributeValue(attribute.getValue()));
+        }
+
+        return attributeValueMap;
+    }
+
+    private Map<String, AttributeValueUpdate> getAttributeValueUpdateMap(Map<String, Object> body) {
+        final Map<String, AttributeValueUpdate> attributeValueMap = new LinkedHashMap<>();
+
+        for (Map.Entry<String, Object> attribute : body.entrySet()) {
+            attributeValueMap.put(attribute.getKey(), getAttributeValueUpdate(attribute.getValue()));
+        }
+
+        return attributeValueMap;
+    }
+
+    private static AttributeValue getAttributeValue(Object value) {
+        if (value == null) {
+            return AttributeValue.builder().nul(true).build();
+        }
+
+        if (value instanceof String) {
+            return AttributeValue.builder().s(value.toString()).build();
+        }
+
+        if (value instanceof Integer) {
+            return AttributeValue.builder().n(value.toString()).build();
+        }
+
+        if (value instanceof Boolean) {
+            return AttributeValue.builder().bool((Boolean) value).build();
+        }
+
+        if (value instanceof String[]) {
+            return AttributeValue.builder().ss((String[]) value).build();
+        }
+
+        if (value instanceof int[]) {
+            return AttributeValue.builder().ns(Stream.of((int[]) value).map(Object::toString).collect(Collectors.toList())).build();
+        }
+
+        if (value instanceof List) {
+            List<?> values = ((List<?>) value);
+
+            if (values.isEmpty()) {
+                return AttributeValue.builder().ss().build();
+            } else if (values.get(0) instanceof Integer) {
+                return AttributeValue.builder().ns(values.stream().map(Object::toString).collect(Collectors.toList())).build();
+            } else {
+                return AttributeValue.builder().ss(values.stream().map(Object::toString).collect(Collectors.toList())).build();
+            }
+        }
+
+        if (value instanceof Map) {
+            Map<String, AttributeValue> nestedAttributes = new LinkedHashMap<>();
+
+            for (Map.Entry<?, ?> nested : ((Map<?, ?>) value).entrySet()) {
+                nestedAttributes.put(nested.getKey().toString(), getAttributeValue(nested.getValue()));
+            }
+
+            return AttributeValue.builder().m(nestedAttributes).build();
+        }
+
+        return AttributeValue.builder().s(value.toString()).build();
+    }
+
+    private static AttributeValueUpdate getAttributeValueUpdate(Object value) {
+        return AttributeValueUpdate.builder()
+                .action(AttributeAction.PUT)
+                .value(getAttributeValue(value)).build();
+    }
+}
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverterTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverterTest.java
new file mode 100644
index 00000000..e88dce4e
--- /dev/null
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverterTest.java
@@ -0,0 +1,184 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.kamelets.utils.transform.aws.ddb;
+
+import java.util.Map;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.camel.Exchange;
+import org.apache.camel.component.aws2.ddb.Ddb2Constants;
+import org.apache.camel.component.aws2.ddb.Ddb2Operations;
+import org.apache.camel.impl.DefaultCamelContext;
+import org.apache.camel.support.DefaultExchange;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import software.amazon.awssdk.services.dynamodb.model.AttributeAction;
+import software.amazon.awssdk.services.dynamodb.model.AttributeValue;
+import software.amazon.awssdk.services.dynamodb.model.AttributeValueUpdate;
+import software.amazon.awssdk.services.dynamodb.model.ReturnValue;
+
+class JsonToDdbModelConverterTest {
+
+    private DefaultCamelContext camelContext;
+
+    private final ObjectMapper mapper = new ObjectMapper();
+
+    private final JsonToDdbModelConverter processor = new JsonToDdbModelConverter();
+
+    private final String keyJson = "{" +
+            "\"name\": \"Rajesh Koothrappali\"" +
+            "}";
+
+    private final String itemJson = "{" +
+            "\"name\": \"Rajesh Koothrappali\"," +
+            "\"age\": 29," +
+            "\"super-heroes\": [\"batman\", \"spiderman\", \"wonderwoman\"]," +
+            "\"issues\": [5, 3, 9, 1]," +
+            "\"girlfriend\": null," +
+            "\"doctorate\": true" +
+            "}";
+
+    @BeforeEach
+    void setup() {
+        this.camelContext = new DefaultCamelContext();
+    }
+
+    @Test
+    @SuppressWarnings("unchecked")
+    void shouldMapPutItemHeaders() throws Exception {
+        Exchange exchange = new DefaultExchange(camelContext);
+
+        exchange.getMessage().setBody(mapper.readTree(itemJson));
+
+        processor.process(Ddb2Operations.PutItem.name(), exchange);
+
+        Assertions.assertTrue(exchange.getMessage().hasHeaders());
+        Assertions.assertEquals(Ddb2Operations.PutItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION));
+        Assertions.assertEquals(ReturnValue.ALL_OLD.toString(), exchange.getMessage().getHeader(Ddb2Constants.RETURN_VALUES));
+
+        assertAttributeValueMap(exchange.getMessage().getHeader(Ddb2Constants.ITEM, Map.class));
+    }
+
+    @Test
+    @SuppressWarnings("unchecked")
+    void shouldMapUpdateItemHeaders() throws Exception {
+        Exchange exchange = new DefaultExchange(camelContext);
+
+        exchange.getMessage().setBody(mapper.readTree("{\"key\": " + keyJson + ", \"item\": " + itemJson + "}"));
+
+        processor.process(Ddb2Operations.UpdateItem.name(), exchange);
+
+        Assertions.assertTrue(exchange.getMessage().hasHeaders());
+        Assertions.assertEquals(Ddb2Operations.UpdateItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION));
+        Assertions.assertEquals(ReturnValue.ALL_NEW.toString(), exchange.getMessage().getHeader(Ddb2Constants.RETURN_VALUES));
+
+        Map<String, AttributeValue> attributeValueMap = exchange.getMessage().getHeader(Ddb2Constants.KEY, Map.class);
+        Assertions.assertEquals(1L, attributeValueMap.size());
+        Assertions.assertEquals(AttributeValue.builder().s("Rajesh Koothrappali").build(), attributeValueMap.get("name"));
+
+        assertAttributeValueUpdateMap(exchange.getMessage().getHeader(Ddb2Constants.UPDATE_VALUES, Map.class));
+    }
+
+    @Test
+    @SuppressWarnings("unchecked")
+    void shouldMapDeleteItemHeaders() throws Exception {
+        Exchange exchange = new DefaultExchange(camelContext);
+
+        exchange.getMessage().setBody(mapper.readTree("{\"key\": " + keyJson + "}"));
+
+        processor.process(Ddb2Operations.DeleteItem.name(), exchange);
+
+        Assertions.assertTrue(exchange.getMessage().hasHeaders());
+        Assertions.assertEquals(Ddb2Operations.DeleteItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION));
+        Assertions.assertEquals(ReturnValue.ALL_OLD.toString(), exchange.getMessage().getHeader(Ddb2Constants.RETURN_VALUES));
+
+        Map<String, AttributeValue> attributeValueMap = exchange.getMessage().getHeader(Ddb2Constants.KEY, Map.class);
+        Assertions.assertEquals(1L, attributeValueMap.size());
+        Assertions.assertEquals(AttributeValue.builder().s("Rajesh Koothrappali").build(), attributeValueMap.get("name"));
+    }
+
+    @Test
+    @SuppressWarnings("unchecked")
+    void shouldMapNestedObjects() throws Exception {
+        Exchange exchange = new DefaultExchange(camelContext);
+
+        exchange.getMessage().setBody(mapper.readTree("{\"user\":" + itemJson + "}"));
+
+        processor.process(Ddb2Operations.PutItem.name(), exchange);
+
+        Assertions.assertTrue(exchange.getMessage().hasHeaders());
+        Assertions.assertEquals(Ddb2Operations.PutItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION));
+        Assertions.assertEquals(ReturnValue.ALL_OLD.toString(), exchange.getMessage().getHeader(Ddb2Constants.RETURN_VALUES));
+
+        Map<String, AttributeValue> attributeValueMap = exchange.getMessage().getHeader(Ddb2Constants.ITEM, Map.class);
+        Assertions.assertEquals(1L, attributeValueMap.size());
+
+        Assertions.assertEquals("AttributeValue(M={name=AttributeValue(S=Rajesh Koothrappali), " +
+                "age=AttributeValue(N=29), " +
+                "super-heroes=AttributeValue(SS=[batman, spiderman, wonderwoman]), " +
+                "issues=AttributeValue(NS=[5, 3, 9, 1]), " +
+                "girlfriend=AttributeValue(NUL=true), " +
+                "doctorate=AttributeValue(BOOL=true)})", attributeValueMap.get("user").toString());
+    }
+
+    @Test
+    @SuppressWarnings("unchecked")
+    void shouldMapEmptyJson() throws Exception {
+        Exchange exchange = new DefaultExchange(camelContext);
+
+        exchange.getMessage().setBody(mapper.readTree("{}"));
+
+        processor.process(Ddb2Operations.PutItem.name(), exchange);
+
+        Assertions.assertTrue(exchange.getMessage().hasHeaders());
+        Assertions.assertEquals(Ddb2Operations.PutItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION));
+        Assertions.assertEquals(ReturnValue.ALL_OLD.toString(), exchange.getMessage().getHeader(Ddb2Constants.RETURN_VALUES));
+
+        Map<String, AttributeValue> attributeValueMap = exchange.getMessage().getHeader(Ddb2Constants.ITEM, Map.class);
+        Assertions.assertEquals(0L, attributeValueMap.size());
+    }
+
+    @Test()
+    void shouldFailForUnsupportedOperation() throws Exception {
+        Exchange exchange = new DefaultExchange(camelContext);
+
+        exchange.getMessage().setBody(mapper.readTree("{}"));
+
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> processor.process(Ddb2Operations.BatchGetItems.name(), exchange));
+    }
+
+    private void assertAttributeValueMap(Map<String, AttributeValue> attributeValueMap) {
+        Assertions.assertEquals(6L, attributeValueMap.size());
+        Assertions.assertEquals(AttributeValue.builder().s("Rajesh Koothrappali").build(), attributeValueMap.get("name"));
+        Assertions.assertEquals(AttributeValue.builder().n("29").build(), attributeValueMap.get("age"));
+        Assertions.assertEquals(AttributeValue.builder().ss("batman", "spiderman", "wonderwoman").build(), attributeValueMap.get("super-heroes"));
+        Assertions.assertEquals(AttributeValue.builder().ns("5", "3", "9", "1").build(), attributeValueMap.get("issues"));
+        Assertions.assertEquals(AttributeValue.builder().nul(true).build(), attributeValueMap.get("girlfriend"));
+        Assertions.assertEquals(AttributeValue.builder().bool(true).build(), attributeValueMap.get("doctorate"));
+    }
+
+    private void assertAttributeValueUpdateMap(Map<String, AttributeValueUpdate> attributeValueMap) {
+        Assertions.assertEquals(6L, attributeValueMap.size());
+        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().s("Rajesh Koothrappali").build()).action(AttributeAction.PUT).build(), attributeValueMap.get("name"));
+        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().n("29").build()).action(AttributeAction.PUT).build(), attributeValueMap.get("age"));
+        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().ss("batman", "spiderman", "wonderwoman").build()).action(AttributeAction.PUT).build(), attributeValueMap.get("super-heroes"));
+        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().ns("5", "3", "9", "1").build()).action(AttributeAction.PUT).build(), attributeValueMap.get("issues"));
+        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().nul(true).build()).action(AttributeAction.PUT).build(), attributeValueMap.get("girlfriend"));
+        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().bool(true).build()).action(AttributeAction.PUT).build(), attributeValueMap.get("doctorate"));
+    }
+}
\ No newline at end of file
diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml
index 87b338ee..5b603abf 100644
--- a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml
+++ b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml
@@ -97,12 +97,6 @@ spec:
         x-descriptors:
           - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
         default: false
-      inputFormat:
-        title: Input Type
-        description: Specify the input type for this Kamelet. The Kamelet will automatically apply conversion logic in order to transform message content to this data type.
-        type: string
-        default: json
-        example: json
   types:
     in:
       mediaType: application/json
@@ -113,26 +107,17 @@ spec:
   - "camel:aws2-ddb"
   - "camel:kamelet"
   template:
-    beans:
-    - name: dataTypeRegistry
-      type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry"
-    - name: inputTypeProcessor
-      type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor"
-      property:
-        - key: scheme
-          value: 'aws2-ddb'
-        - key: format
-          value: '{{inputFormat}}'
-        - key: registry
-          value: '#bean:{{dataTypeRegistry}}'
     from:
       uri: "kamelet:source"
       steps:
       - set-property:
           name: operation
           constant: "{{operation}}"
-      - process:
-          ref: "{{inputTypeProcessor}}"
+      - unmarshal:
+          json:
+            library: Jackson
+            unmarshalType: com.fasterxml.jackson.databind.JsonNode
+      - bean: "org.apache.camel.kamelets.utils.transform.aws.ddb.JsonToDdbModelConverter"
       - to:
           uri: "aws2-ddb:{{table}}"
           parameters:
diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml
index d937f6e5..6ab2bca4 100644
--- a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml
+++ b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml
@@ -107,12 +107,6 @@ spec:
         description: The number of milliseconds before the next poll of the selected bucket.
         type: integer
         default: 500
-      outputFormat:
-        title: Output Type
-        description: Choose the output type for this Kamelet. The Kamelet supports different output types and performs automatic message conversion according to this data type.
-        type: string
-        default: binary
-        example: binary
   dependencies:
     - "camel:core"
     - "camel:aws2-s3"
@@ -120,17 +114,6 @@ spec:
     - "camel:kamelet"
   template:
     beans:
-      - name: dataTypeRegistry
-        type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry"
-      - name: outputTypeProcessor
-        type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor"
-        property:
-          - key: scheme
-            value: 'aws2-s3'
-          - key: format
-            value: '{{outputFormat}}'
-          - key: registry
-            value: '#bean:{{dataTypeRegistry}}'
       - name: renameHeaders
         type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders"
         property:
@@ -160,6 +143,4 @@ spec:
       steps:
       - process:
           ref: "{{renameHeaders}}"
-      - process:
-          ref: "{{outputTypeProcessor}}"
       - to: "kamelet:sink"


[camel-kamelets] 18/28: Fix rest-openapi-sink YAKS test

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit 6f206cabaa028a1862d9aa6684703003b701a262
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Fri Nov 25 10:51:35 2022 +0100

    Fix rest-openapi-sink YAKS test
---
 test/rest-openapi-sink/rest-openapi-sink.feature | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/rest-openapi-sink/rest-openapi-sink.feature b/test/rest-openapi-sink/rest-openapi-sink.feature
index 396f23d5..c5de6406 100644
--- a/test/rest-openapi-sink/rest-openapi-sink.feature
+++ b/test/rest-openapi-sink/rest-openapi-sink.feature
@@ -41,7 +41,7 @@ Feature: REST OpenAPI Kamelet sink
     Then send HTTP 200 OK
 
   Scenario: Verify proper addPet request message sent
-    Given expect HTTP request body: citrus:readFile(classpath:openapi.json)
+    Given expect HTTP request body: ${pet}
     And HTTP request header Content-Type is "application/json"
     When receive POST /petstore/pet
     And send HTTP 201 CREATED


[camel-kamelets] 12/28: Fix AWS DDB sink Kamelet

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit ca5892d158a391eed5b41be692a8eeb3af2e382d
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Mon Nov 21 16:32:33 2022 +0100

    Fix AWS DDB sink Kamelet
---
 kamelets/aws-ddb-sink.kamelet.yaml                                    | 4 ++--
 .../src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml             | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/kamelets/aws-ddb-sink.kamelet.yaml b/kamelets/aws-ddb-sink.kamelet.yaml
index 952ecfa1..87b338ee 100644
--- a/kamelets/aws-ddb-sink.kamelet.yaml
+++ b/kamelets/aws-ddb-sink.kamelet.yaml
@@ -129,8 +129,8 @@ spec:
       uri: "kamelet:source"
       steps:
       - set-property:
-        name: operation
-        constant: "{{operation}}"
+          name: operation
+          constant: "{{operation}}"
       - process:
           ref: "{{inputTypeProcessor}}"
       - to:
diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml
index 952ecfa1..87b338ee 100644
--- a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml
+++ b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml
@@ -129,8 +129,8 @@ spec:
       uri: "kamelet:source"
       steps:
       - set-property:
-        name: operation
-        constant: "{{operation}}"
+          name: operation
+          constant: "{{operation}}"
       - process:
           ref: "{{inputTypeProcessor}}"
       - to:


[camel-kamelets] 03/28: Fix Jitpack coordinates replacement and use KinD cluster v0.14.0

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit 4bdbc2d7df0fab51a929ac1bb34ee9f33db25027
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Thu Nov 17 14:18:25 2022 +0100

    Fix Jitpack coordinates replacement and use KinD cluster v0.14.0
---
 .github/workflows/yaks-tests.yaml | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/yaks-tests.yaml b/.github/workflows/yaks-tests.yaml
index defc5733..e06d1751 100644
--- a/.github/workflows/yaks-tests.yaml
+++ b/.github/workflows/yaks-tests.yaml
@@ -61,10 +61,10 @@ jobs:
         HEAD_REF: ${{ github.head_ref }}
         HEAD_REPO: ${{ github.event.pull_request.head.repo.full_name }}
       run: |
-        echo "Set JitPack dependency coordinates to ${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF/\//'~'}-SNAPSHOT"
+        echo "Set JitPack dependency coordinates to ${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF//\//'~'}-SNAPSHOT"
 
         # Overwrite JitPack coordinates in the local Kamelets so the tests can use the utility classes in this PR
-        find kamelets -maxdepth 1 -name '*.kamelet.yaml' -exec sed  -i "s/github:apache.camel-kamelets:camel-kamelets-utils:${BASE_REF}-SNAPSHOT/github:${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF/\//'~'}-SNAPSHOT/g" {} +
+        find kamelets -maxdepth 1 -name '*.kamelet.yaml' -exec sed -i "s/github:apache.camel-kamelets:camel-kamelets-utils:${BASE_REF}-SNAPSHOT/github:${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF//\//'~'}-SNAPSHOT/g" {} +
     - name: Get Camel K CLI
       run: |
         curl --fail -L --silent https://github.com/apache/camel-k/releases/download/v${CAMEL_K_VERSION}/camel-k-client-${CAMEL_K_VERSION}-linux-64bit.tar.gz -o kamel.tar.gz
@@ -83,6 +83,9 @@ jobs:
         rm -r _yaks
     - name: Kubernetes KinD Cluster
       uses: container-tools/kind-action@v1
+      with:
+        version: v0.14.0
+        node_image: kindest/node:v1.23.6@sha256:b1fa224cc6c7ff32455e0b1fd9cbfd3d3bc87ecaa8fcb06961ed1afb3db0f9ae
     - name: Info
       run: |
         kubectl version
@@ -95,11 +98,7 @@ jobs:
         export KAMEL_INSTALL_REGISTRY=$KIND_REGISTRY
         export KAMEL_INSTALL_REGISTRY_INSECURE=true
 
-        kamel install -w
-
-        # TODO replaces the below statement with --operator-env-vars KAMEL_INSTALL_DEFAULT_KAMELETS=false
-        # when we use camel k 1.8.0
-        kubectl delete kamelets --all
+        kamel install -w --operator-env-vars KAMEL_INSTALL_DEFAULT_KAMELETS=false
 
         # Install the local kamelets
         find kamelets -maxdepth 1 -name '*.kamelet.yaml' -exec kubectl apply -f {} \;
@@ -110,7 +109,7 @@ jobs:
       run: |
         echo "Running tests"
         yaks run test/aws-ddb-sink $YAKS_RUN_OPTIONS
-        yaks run test/aws-s3 $YAKS_RUN_OPTIONS
+        yaks run test/aws-s3/aws-s3-uri-binding.feature $YAKS_RUN_OPTIONS
         yaks run test/extract-field-action $YAKS_RUN_OPTIONS
         yaks run test/insert-field-action $YAKS_RUN_OPTIONS
         yaks run test/mail-sink $YAKS_RUN_OPTIONS


[camel-kamelets] 14/28: Fix cloud event type and do not set data content type

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit 49cbe909ae5bb15537ecdcd851ef16c23e2a65ad
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Mon Nov 21 22:01:09 2022 +0100

    Fix cloud event type and do not set data content type
    
    Setting the data content type breaks the Camel Knative producer
---
 .../format/converter/aws2/s3/AWS2S3CloudEventOutputType.java      | 8 +-------
 .../format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java  | 2 +-
 2 files changed, 2 insertions(+), 8 deletions(-)

diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
index 13579054..399e0111 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java
@@ -40,16 +40,10 @@ public class AWS2S3CloudEventOutputType implements DataTypeConverter {
     public void convert(Exchange exchange) {
         final Map<String, Object> headers = exchange.getMessage().getHeaders();
 
-        headers.put(CloudEvent.CAMEL_CLOUD_EVENT_TYPE, "kamelet:aws-s3-source");
+        headers.put(CloudEvent.CAMEL_CLOUD_EVENT_TYPE, "kamelet.aws.s3.source");
         headers.put(CloudEvent.CAMEL_CLOUD_EVENT_SOURCE, exchange.getMessage().getHeader(AWS2S3Constants.BUCKET_NAME, String.class));
         headers.put(CloudEvent.CAMEL_CLOUD_EVENT_SUBJECT, exchange.getMessage().getHeader(AWS2S3Constants.KEY, String.class));
         headers.put(CloudEvent.CAMEL_CLOUD_EVENT_TIME, getEventTime(exchange));
-        headers.put(CloudEvent.CAMEL_CLOUD_EVENT_DATA_CONTENT_TYPE, exchange.getMessage().getHeader(AWS2S3Constants.CONTENT_TYPE, String.class));
-
-        String encoding = exchange.getMessage().getHeader(AWS2S3Constants.CONTENT_ENCODING, String.class);
-        if (encoding != null) {
-            headers.put(CloudEvent.CAMEL_CLOUD_EVENT_DATA_CONTENT_ENCODING, encoding);
-        }
     }
 
     private String getEventTime(Exchange exchange) {
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java
index 0a71f90d..e139b2b9 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java
@@ -53,7 +53,7 @@ class AWS2S3CloudEventOutputTypeTest {
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
         Assertions.assertTrue(exchange.getMessage().getHeaders().containsKey(AWS2S3Constants.KEY));
-        assertEquals("kamelet:aws-s3-source", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_TYPE));
+        assertEquals("kamelet.aws.s3.source", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_TYPE));
         assertEquals("test1.txt", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_SUBJECT));
         assertEquals("myBucket", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_SOURCE));
     }


[camel-kamelets] 05/28: Use log-sink Kamelet and show headers

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit ed28941e70185dcae9ba40c0de9823a85d9c6bbb
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Fri Nov 18 09:30:44 2022 +0100

    Use log-sink Kamelet and show headers
---
 test/aws-s3/aws-s3-uri-binding.yaml | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/test/aws-s3/aws-s3-uri-binding.yaml b/test/aws-s3/aws-s3-uri-binding.yaml
index 14d420f9..e21d54f4 100644
--- a/test/aws-s3/aws-s3-uri-binding.yaml
+++ b/test/aws-s3/aws-s3-uri-binding.yaml
@@ -34,4 +34,9 @@ spec:
       secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}
       region: ${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}
   sink:
-    uri: log:info
+    ref:
+      kind: Kamelet
+      apiVersion: camel.apache.org/v1alpha1
+      name: log-sink
+    properties:
+      showHeaders: true


[camel-kamelets] 01/28: Introduce Kamelet input/output data types

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-kamelets.git

commit 4595e5f2e64ac4b0ad1113867bfd18b9142285cf
Author: Christoph Deppisch <cd...@redhat.com>
AuthorDate: Mon Nov 14 13:15:00 2022 +0100

    Introduce Kamelet input/output data types
    
    - Introduce data type converters
    - Add data type processor to auto convert exchange message from/to given data type
    - Let user choose which data type to use (via Kamelet property)
    - Add data type registry and annotation based loader to find data type implementations by component scheme and name
    
    Relates to CAMEL-18698 and apache/camel-k#1980
---
 .github/workflows/yaks-tests.yaml                  |   3 +-
 kamelets/aws-ddb-sink.kamelet.yaml                 |  27 +++-
 kamelets/aws-s3-source.kamelet.yaml                |  17 +++
 library/camel-kamelets-utils/pom.xml               |   7 +-
 .../utils/format/AnnotationDataTypeLoader.java     | 152 ++++++++++++++++++++
 .../kamelets/utils/format/DataTypeProcessor.java   |  67 +++++++++
 .../utils/format/DefaultDataTypeConverter.java     |  54 ++++++++
 .../utils/format/DefaultDataTypeRegistry.java      | 154 +++++++++++++++++++++
 .../converter/aws2/ddb/Ddb2JsonInputType.java}     |  87 ++++++++----
 .../converter/aws2/s3/AWS2S3BinaryOutputType.java  |  55 ++++++++
 .../converter/aws2/s3/AWS2S3JsonOutputType.java    |  63 +++++++++
 .../converter/standard/JsonModelDataType.java      |  66 +++++++++
 .../utils/format/spi/DataTypeConverter.java        |  39 ++++++
 .../kamelets/utils/format/spi/DataTypeLoader.java  |  31 +++++
 .../utils/format/spi/DataTypeRegistry.java         |  60 ++++++++
 .../utils/format/spi/annotations/DataType.java     |  51 +++++++
 .../META-INF/services/org/apache/camel/DataType    |  20 +++
 .../utils/format/DefaultDataTypeRegistryTest.java  |  57 ++++++++
 .../converter/aws2/ddb/Ddb2JsonInputTypeTest.java} | 104 +++++++++-----
 .../aws2/s3/AWS2S3JsonOutputTypeTest.java          |  98 +++++++++++++
 .../converter/standard/JsonModelDataTypeTest.java  |  84 +++++++++++
 .../src/test/resources/log4j2-test.xml             |  32 +++++
 .../resources/kamelets/aws-ddb-sink.kamelet.yaml   |  27 +++-
 .../resources/kamelets/aws-s3-source.kamelet.yaml  |  17 +++
 test/aws-s3/README.md                              |  76 ++++++++++
 test/aws-s3/amazonS3Client.groovy                  |  36 +++++
 test/aws-s3/aws-s3-credentials.properties          |   7 +
 test/aws-s3/aws-s3-inmem-binding.feature           |  49 +++++++
 test/aws-s3/aws-s3-source-property-conf.feature    |  37 +++++
 test/aws-s3/aws-s3-source-secret-conf.feature      |  39 ++++++
 test/aws-s3/aws-s3-source-uri-conf.feature         |  32 +++++
 test/aws-s3/aws-s3-to-inmem.yaml                   |  39 ++++++
 test/aws-s3/aws-s3-to-log-secret-based.groovy      |  21 +++
 test/aws-s3/aws-s3-to-log-uri-based.groovy         |  29 ++++
 test/aws-s3/aws-s3-uri-binding.feature             |  35 +++++
 test/aws-s3/aws-s3-uri-binding.yaml                |  37 +++++
 test/aws-s3/yaks-config.yaml                       |  65 +++++++++
 test/utils/inmem-to-log.yaml                       |  29 ++++
 38 files changed, 1829 insertions(+), 74 deletions(-)

diff --git a/.github/workflows/yaks-tests.yaml b/.github/workflows/yaks-tests.yaml
index 46acc626..defc5733 100644
--- a/.github/workflows/yaks-tests.yaml
+++ b/.github/workflows/yaks-tests.yaml
@@ -43,7 +43,7 @@ concurrency:
 env:
   CAMEL_K_VERSION: 1.10.3
   YAKS_VERSION: 0.11.0
-  YAKS_IMAGE_NAME: "docker.io/yaks/yaks"
+  YAKS_IMAGE_NAME: "docker.io/citrusframework/yaks"
   YAKS_RUN_OPTIONS: "--timeout=15m"
 
 jobs:
@@ -110,6 +110,7 @@ jobs:
       run: |
         echo "Running tests"
         yaks run test/aws-ddb-sink $YAKS_RUN_OPTIONS
+        yaks run test/aws-s3 $YAKS_RUN_OPTIONS
         yaks run test/extract-field-action $YAKS_RUN_OPTIONS
         yaks run test/insert-field-action $YAKS_RUN_OPTIONS
         yaks run test/mail-sink $YAKS_RUN_OPTIONS
diff --git a/kamelets/aws-ddb-sink.kamelet.yaml b/kamelets/aws-ddb-sink.kamelet.yaml
index 5b603abf..ba200347 100644
--- a/kamelets/aws-ddb-sink.kamelet.yaml
+++ b/kamelets/aws-ddb-sink.kamelet.yaml
@@ -97,6 +97,12 @@ spec:
         x-descriptors:
           - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
         default: false
+      inputFormat:
+        title: Input Type
+        description: Specify the input type for this Kamelet. The Kamelet will automatically apply conversion logic in order to transform message content to this data type.
+        type: string
+        default: json
+        example: json
   types:
     in:
       mediaType: application/json
@@ -107,17 +113,24 @@ spec:
   - "camel:aws2-ddb"
   - "camel:kamelet"
   template:
+    beans:
+    - name: dataTypeRegistry
+      type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry"
+    - name: inputTypeProcessor
+      type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor"
+      property:
+        - key: scheme
+          value: 'aws2-ddb'
+        - key: format
+          value: '{{inputFormat}}'
     from:
       uri: "kamelet:source"
       steps:
       - set-property:
-          name: operation
-          constant: "{{operation}}"
-      - unmarshal:
-          json:
-            library: Jackson
-            unmarshalType: com.fasterxml.jackson.databind.JsonNode
-      - bean: "org.apache.camel.kamelets.utils.transform.aws.ddb.JsonToDdbModelConverter"
+        name: operation
+        constant: "{{operation}}"
+      - process:
+          ref: "{{inputTypeProcessor}}"
       - to:
           uri: "aws2-ddb:{{table}}"
           parameters:
diff --git a/kamelets/aws-s3-source.kamelet.yaml b/kamelets/aws-s3-source.kamelet.yaml
index 6ab2bca4..e09cf4aa 100644
--- a/kamelets/aws-s3-source.kamelet.yaml
+++ b/kamelets/aws-s3-source.kamelet.yaml
@@ -107,6 +107,12 @@ spec:
         description: The number of milliseconds before the next poll of the selected bucket.
         type: integer
         default: 500
+      outputFormat:
+        title: Output Type
+        description: Choose the output type for this Kamelet. The Kamelet supports different output types and performs automatic message conversion according to this data type.
+        type: string
+        default: binary
+        example: binary
   dependencies:
     - "camel:core"
     - "camel:aws2-s3"
@@ -114,6 +120,15 @@ spec:
     - "camel:kamelet"
   template:
     beans:
+      - name: dataTypeRegistry
+        type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry"
+      - name: outputTypeProcessor
+        type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor"
+        property:
+          - key: scheme
+            value: 'aws2-s3'
+          - key: format
+            value: '{{outputFormat}}'
       - name: renameHeaders
         type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders"
         property:
@@ -143,4 +158,6 @@ spec:
       steps:
       - process:
           ref: "{{renameHeaders}}"
+      - process:
+          ref: "{{outputTypeProcessor}}"
       - to: "kamelet:sink"
diff --git a/library/camel-kamelets-utils/pom.xml b/library/camel-kamelets-utils/pom.xml
index 4f848d36..5b1441f3 100644
--- a/library/camel-kamelets-utils/pom.xml
+++ b/library/camel-kamelets-utils/pom.xml
@@ -71,12 +71,17 @@
             <artifactId>camel-kafka</artifactId>
         </dependency>
 
-        <!-- AWS Dynamo DB camel component -->
+        <!-- Optional dependencies for data type conversion -->
         <dependency>
             <groupId>org.apache.camel</groupId>
             <artifactId>camel-aws2-ddb</artifactId>
             <scope>provided</scope>
         </dependency>
+        <dependency>
+            <groupId>org.apache.camel</groupId>
+            <artifactId>camel-aws2-s3</artifactId>
+            <scope>provided</scope>
+        </dependency>
 
         <!-- Test scoped dependencies -->
         <dependency>
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/AnnotationDataTypeLoader.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/AnnotationDataTypeLoader.java
new file mode 100644
index 00000000..96ca50eb
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/AnnotationDataTypeLoader.java
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.util.Enumeration;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.camel.TypeConverterLoaderException;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeLoader;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeRegistry;
+import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
+import org.apache.camel.spi.Injector;
+import org.apache.camel.spi.PackageScanClassResolver;
+import org.apache.camel.util.IOHelper;
+import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Data type loader scans packages for {@link DataTypeConverter} classes annotated with {@link DataType} annotation.
+ */
+public class AnnotationDataTypeLoader implements DataTypeLoader {
+
+    public static final String META_INF_SERVICES = "META-INF/services/org/apache/camel/DataType";
+
+    private static final Logger LOG = LoggerFactory.getLogger(AnnotationDataTypeLoader.class);
+
+    protected final PackageScanClassResolver resolver;
+    protected final Injector injector;
+
+    protected Set<Class<?>> visitedClasses = new HashSet<>();
+    protected Set<String> visitedURIs = new HashSet<>();
+
+    public AnnotationDataTypeLoader(Injector injector, PackageScanClassResolver resolver) {
+        this.injector = injector;
+        this.resolver = resolver;
+    }
+
+    @Override
+    public void load(DataTypeRegistry registry) {
+        Set<String> packages = new HashSet<>();
+
+        LOG.trace("Searching for {} services", META_INF_SERVICES);
+        try {
+            ClassLoader ccl = Thread.currentThread().getContextClassLoader();
+            if (ccl != null) {
+                findPackages(packages, ccl);
+            }
+            findPackages(packages, getClass().getClassLoader());
+            if (packages.isEmpty()) {
+                LOG.debug("No package names found to be used for classpath scanning for annotated data types.");
+                return;
+            }
+        } catch (Exception e) {
+            throw new TypeConverterLoaderException(
+                    "Cannot find package names to be used for classpath scanning for annotated data types.", e);
+        }
+
+        // if there is any packages to scan and load @DataType classes, then do it
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("Found data type packages to scan: {}", String.join(", ", packages));
+        }
+        Set<Class<?>> scannedClasses = resolver.findAnnotated(DataType.class, packages.toArray(new String[]{}));
+        if (!scannedClasses.isEmpty()) {
+            LOG.debug("Found {} packages with {} @DataType classes to load", packages.size(), scannedClasses.size());
+
+            // load all the found classes into the type data type registry
+            for (Class<?> type : scannedClasses) {
+                if (acceptClass(type)) {
+                    if (LOG.isTraceEnabled()) {
+                        LOG.trace("Loading data type annotation: {}", ObjectHelper.name(type));
+                    }
+                    loadDataType(registry, type);
+                }
+            }
+        }
+
+        // now clear the maps so we do not hold references
+        visitedClasses.clear();
+        visitedURIs.clear();
+    }
+
+    private void loadDataType(DataTypeRegistry registry, Class<?> type) {
+        if (visitedClasses.contains(type)) {
+            return;
+        }
+        visitedClasses.add(type);
+
+        try {
+            if (DataTypeConverter.class.isAssignableFrom(type) && type.isAnnotationPresent(DataType.class)) {
+                DataType dt = type.getAnnotation(DataType.class);
+                DataTypeConverter converter = (DataTypeConverter) injector.newInstance(type);
+                registry.addDataTypeConverter(dt.scheme(), converter);
+            }
+        } catch (NoClassDefFoundError e) {
+            LOG.debug("Ignoring converter type: {} as a dependent class could not be found: {}",
+                    type.getCanonicalName(), e, e);
+        }
+    }
+
+    protected boolean acceptClass(Class<?> type) {
+        return true;
+    }
+
+    protected void findPackages(Set<String> packages, ClassLoader classLoader) throws IOException {
+        Enumeration<URL> resources = classLoader.getResources(META_INF_SERVICES);
+        while (resources.hasMoreElements()) {
+            URL url = resources.nextElement();
+            String path = url.getPath();
+            if (!visitedURIs.contains(path)) {
+                // remember we have visited this uri so we wont read it twice
+                visitedURIs.add(path);
+                LOG.debug("Loading file {} to retrieve list of packages, from url: {}", META_INF_SERVICES, url);
+                try (BufferedReader reader = IOHelper.buffered(new InputStreamReader(url.openStream(), StandardCharsets.UTF_8))) {
+                    while (true) {
+                        String line = reader.readLine();
+                        if (line == null) {
+                            break;
+                        }
+                        line = line.trim();
+                        if (line.startsWith("#") || line.length() == 0) {
+                            continue;
+                        }
+                        packages.add(line);
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java
new file mode 100644
index 00000000..859269fe
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format;
+
+import org.apache.camel.BeanInject;
+import org.apache.camel.CamelContext;
+import org.apache.camel.CamelContextAware;
+import org.apache.camel.Exchange;
+import org.apache.camel.Processor;
+
+/**
+ * Processor applies data type conversion based on given format name. Searches for matching data type converter
+ * with given component scheme and format name.
+ */
+public class DataTypeProcessor implements Processor, CamelContextAware {
+
+    private CamelContext camelContext;
+
+    @BeanInject
+    private DefaultDataTypeRegistry dataTypeRegistry;
+
+    private String scheme;
+    private String format;
+
+    @Override
+    public void process(Exchange exchange) throws Exception {
+        if (format == null || format.isEmpty()) {
+            return;
+        }
+
+        dataTypeRegistry.lookup(scheme, format)
+                        .ifPresent(converter -> converter.convert(exchange));
+    }
+
+    public void setFormat(String format) {
+        this.format = format;
+    }
+
+    public void setScheme(String scheme) {
+        this.scheme = scheme;
+    }
+
+    @Override
+    public CamelContext getCamelContext() {
+        return camelContext;
+    }
+
+    @Override
+    public void setCamelContext(CamelContext camelContext) {
+        this.camelContext = camelContext;
+    }
+}
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java
new file mode 100644
index 00000000..11680b50
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+
+/**
+ * Default data type converter receives a name and a target type in order to use traditional exchange body conversion
+ * mechanisms in order to transform the message body to a given type.
+ */
+public class DefaultDataTypeConverter implements DataTypeConverter {
+
+    private final String name;
+    private final Class<?> type;
+
+    public DefaultDataTypeConverter(String name, Class<?> type) {
+        this.name = name;
+        this.type = type;
+    }
+
+    @Override
+    public void convert(Exchange exchange) {
+        if (type.isInstance(exchange.getMessage().getBody())) {
+            return;
+        }
+
+        exchange.getMessage().setBody(exchange.getMessage().getBody(type));
+    }
+
+    @Override
+    public String getName() {
+        return name;
+    }
+
+    public Class<?> getType() {
+        return type;
+    }
+}
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
new file mode 100644
index 00000000..e7c6e3e8
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+import org.apache.camel.CamelContext;
+import org.apache.camel.CamelContextAware;
+import org.apache.camel.ExtendedCamelContext;
+import org.apache.camel.RuntimeCamelException;
+import org.apache.camel.impl.engine.DefaultInjector;
+import org.apache.camel.impl.engine.DefaultPackageScanClassResolver;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeLoader;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeRegistry;
+import org.apache.camel.spi.PackageScanClassResolver;
+import org.apache.camel.support.service.ServiceSupport;
+
+/**
+ * Default data type registry able to resolve data types converters in the project. Data types may be defined at the component level
+ * via {@link org.apache.camel.kamelets.utils.format.spi.annotations.DataType} annotations. Also, users can add data types directly
+ * to the Camel context or manually to the registry.
+ *
+ * The registry is able to retrieve converters for a given data type based on the component scheme and the given data type name.
+ */
+public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeRegistry, CamelContextAware {
+
+    private CamelContext camelContext;
+
+    private PackageScanClassResolver resolver;
+
+    protected final List<DataTypeLoader> dataTypeLoaders = new ArrayList<>();
+
+    private final Map<String, List<DataTypeConverter>> dataTypeConverters = new HashMap<>();
+
+    @Override
+    public void addDataTypeConverter(String scheme, DataTypeConverter converter) {
+        this.getComponentDataTypeConverters(scheme).add(converter);
+    }
+
+    @Override
+    public Optional<DataTypeConverter> lookup(String scheme, String name) {
+        if (dataTypeLoaders.isEmpty()) {
+            try {
+                doInit();
+            } catch (Exception e) {
+                throw new RuntimeCamelException("Failed to initialize data type registry", e);
+            }
+        }
+
+        if (name == null) {
+            return Optional.empty();
+        }
+
+        Optional<DataTypeConverter> componentDataTypeConverter = getComponentDataTypeConverters(scheme).stream()
+                .filter(dtc -> name.equals(dtc.getName()))
+                .findFirst();
+
+        if (componentDataTypeConverter.isPresent()) {
+            return componentDataTypeConverter;
+        }
+
+        return getDefaultDataTypeConverter(name);
+    }
+
+    @Override
+    protected void doInit() throws Exception {
+        super.doInit();
+
+        if (resolver == null) {
+            if (camelContext != null) {
+                resolver = camelContext.adapt(ExtendedCamelContext.class).getPackageScanClassResolver();
+            } else {
+                resolver = new DefaultPackageScanClassResolver();
+            }
+        }
+
+        dataTypeLoaders.add(new AnnotationDataTypeLoader(new DefaultInjector(camelContext), resolver));
+
+        addDataTypeConverter(new DefaultDataTypeConverter("string", String.class));
+        addDataTypeConverter(new DefaultDataTypeConverter("binary", byte[].class));
+
+        for (DataTypeLoader loader : dataTypeLoaders) {
+            CamelContextAware.trySetCamelContext(loader, getCamelContext());
+            loader.load(this);
+        }
+    }
+
+    @Override
+    protected void doStop() throws Exception {
+        super.doStop();
+
+        this.dataTypeConverters.clear();
+    }
+
+    /**
+     * Retrieve default data output type from Camel context for given format name.
+     * @param name
+     * @return
+     */
+    private Optional<DataTypeConverter> getDefaultDataTypeConverter(String name) {
+        Optional<DataTypeConverter> dataTypeConverter = getComponentDataTypeConverters("camel").stream()
+                .filter(dtc -> name.equals(dtc.getName()))
+                .findFirst();
+
+        if (dataTypeConverter.isPresent()) {
+            return dataTypeConverter;
+        }
+
+        return Optional.ofNullable(camelContext.getRegistry().lookupByNameAndType(name, DataTypeConverter.class));
+    }
+
+    /**
+     * Retrieve list of data types defined on the component level for given scheme.
+     * @param scheme
+     * @return
+     */
+    private List<DataTypeConverter> getComponentDataTypeConverters(String scheme) {
+        if (!dataTypeConverters.containsKey(scheme)) {
+            dataTypeConverters.put(scheme, new ArrayList<>());
+        }
+
+        return dataTypeConverters.get(scheme);
+    }
+
+    @Override
+    public CamelContext getCamelContext() {
+        return camelContext;
+    }
+
+    @Override
+    public void setCamelContext(CamelContext camelContext) {
+        this.camelContext = camelContext;
+    }
+}
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverter.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputType.java
similarity index 69%
rename from library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverter.java
rename to library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputType.java
index c5098c1c..a15ff3a0 100644
--- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverter.java
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputType.java
@@ -14,22 +14,27 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.camel.kamelets.utils.transform.aws.ddb;
 
+package org.apache.camel.kamelets.utils.format.converter.aws2.ddb;
+
+import java.io.InputStream;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.camel.CamelExecutionException;
 import org.apache.camel.Exchange;
-import org.apache.camel.ExchangeProperty;
-import org.apache.camel.InvalidPayloadException;
 import org.apache.camel.component.aws2.ddb.Ddb2Constants;
 import org.apache.camel.component.aws2.ddb.Ddb2Operations;
+import org.apache.camel.component.jackson.JacksonDataFormat;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
 import software.amazon.awssdk.services.dynamodb.model.AttributeAction;
 import software.amazon.awssdk.services.dynamodb.model.AttributeValue;
 import software.amazon.awssdk.services.dynamodb.model.AttributeValueUpdate;
@@ -40,55 +45,78 @@ import software.amazon.awssdk.services.dynamodb.model.ReturnValue;
  *
  * Json property names map to attribute keys and Json property values map to attribute values.
  *
- * During mapping the Json property types resolve to the respective attribute types ({@code String, StringSet, Boolean, Number, NumberSet, Map, Null}).
- * Primitive typed arrays in Json get mapped to {@code StringSet} or {@code NumberSet} attribute values.
+ * During mapping the Json property types resolve to the respective attribute types
+ * ({@code String, StringSet, Boolean, Number, NumberSet, Map, Null}). Primitive typed arrays in Json get mapped to
+ * {@code StringSet} or {@code NumberSet} attribute values.
+ *
+ * The input type supports the operations: PutItem, UpdateItem, DeleteItem
  *
  * For PutItem operation the Json body defines all item attributes.
  *
  * For DeleteItem operation the Json body defines only the primary key attributes that identify the item to delete.
  *
- * For UpdateItem operation the Json body defines both key attributes to identify the item to be updated and all item attributes tht get updated on the item.
+ * For UpdateItem operation the Json body defines both key attributes to identify the item to be updated and all item
+ * attributes that get updated on the item.
+ *
+ * The given Json body can use "operation", "key" and "item" as top level properties. "key" and "item" each define a
+ * Json object that will be mapped to a respective attribute value map:
  *
- * The given Json body can use "key" and "item" as top level properties.
- * Both define a Json object that will be mapped to respective attribute value maps:
- * <pre>{@code
+ * <pre>
+ * {@code
  * {
+ *   "operation": "PutItem"
  *   "key": {},
  *   "item": {}
  * }
  * }
  * </pre>
- * The converter will extract the objects and set respective attribute value maps as header entries.
- * This is a comfortable way to define different key and item attribute value maps e.g. on UpdateItem operation.
  *
- * In case key and item attribute value maps are identical you can omit the special top level properties completely.
- * The converter will map the whole Json body as is then and use it as source for the attribute value map.
+ * The converter will extract the objects and set respective attribute value maps as header entries. This is a
+ * comfortable way to define different key and item attribute value maps e.g. on UpdateItem operation.
+ *
+ * In case key and item attribute value maps are identical you can omit the special top level properties completely. The
+ * converter will map the whole Json body as is then and use it as source for the attribute value map.
  */
-public class JsonToDdbModelConverter {
+@DataType(scheme = "aws2-ddb", name = "json")
+public class Ddb2JsonInputType implements DataTypeConverter {
+
+    private final JacksonDataFormat dataFormat = new JacksonDataFormat(new ObjectMapper(), JsonNode.class);
 
-    public String process(@ExchangeProperty("operation") String operation, Exchange exchange) throws InvalidPayloadException {
+    @Override
+    public void convert(Exchange exchange) {
         if (exchange.getMessage().getHeaders().containsKey(Ddb2Constants.ITEM) ||
                 exchange.getMessage().getHeaders().containsKey(Ddb2Constants.KEY)) {
-            return "";
+            return;
         }
 
-        ObjectMapper mapper = new ObjectMapper();
+        JsonNode jsonBody = getBodyAsJsonNode(exchange);
+
+        String operation
+                = Optional.ofNullable(jsonBody.get("operation")).map(JsonNode::asText).orElse(Ddb2Operations.PutItem.name());
+        if (exchange.hasProperties() && exchange.getProperty("operation", String.class) != null) {
+            operation = exchange.getProperty("operation", String.class);
+        }
 
-        JsonNode jsonBody = exchange.getMessage().getMandatoryBody(JsonNode.class);
+        if (exchange.getIn().getHeaders().containsKey(Ddb2Constants.OPERATION)) {
+            operation = exchange.getIn().getHeader(Ddb2Constants.OPERATION, Ddb2Operations.class).name();
+        }
 
         JsonNode key = jsonBody.get("key");
         JsonNode item = jsonBody.get("item");
 
         Map<String, Object> keyProps;
         if (key != null) {
-            keyProps = mapper.convertValue(key, new TypeReference<Map<String, Object>>(){});
+            keyProps = dataFormat.getObjectMapper().convertValue(key, new TypeReference<Map<String, Object>>() {
+            });
         } else {
-            keyProps = mapper.convertValue(jsonBody, new TypeReference<Map<String, Object>>(){});
+            keyProps = dataFormat.getObjectMapper().convertValue(jsonBody, new TypeReference<Map<String, Object>>() {
+            });
         }
 
         Map<String, Object> itemProps;
         if (item != null) {
-            itemProps = mapper.convertValue(item, new TypeReference<Map<String, Object>>(){});
+            itemProps = dataFormat.getObjectMapper().convertValue(item, new TypeReference<Map<String, Object>>() {
+            });
         } else {
             itemProps = keyProps;
         }
@@ -115,8 +143,18 @@ public class JsonToDdbModelConverter {
             default:
                 throw new UnsupportedOperationException(String.format("Unsupported operation '%s'", operation));
         }
+    }
 
-        return "";
+    private JsonNode getBodyAsJsonNode(Exchange exchange) {
+        try {
+            if (exchange.getMessage().getBody() instanceof JsonNode) {
+                return exchange.getMessage().getMandatoryBody(JsonNode.class);
+            }
+
+            return (JsonNode) dataFormat.unmarshal(exchange, exchange.getMessage().getMandatoryBody(InputStream.class));
+        } catch (Exception e) {
+            throw new CamelExecutionException("Failed to get mandatory Json node from message body", exchange, e);
+        }
     }
 
     private void setHeaderIfNotPresent(String headerName, Object value, Exchange exchange) {
@@ -165,11 +203,12 @@ public class JsonToDdbModelConverter {
         }
 
         if (value instanceof int[]) {
-            return AttributeValue.builder().ns(Stream.of((int[]) value).map(Object::toString).collect(Collectors.toList())).build();
+            return AttributeValue.builder().ns(Stream.of((int[]) value).map(Object::toString).collect(Collectors.toList()))
+                    .build();
         }
 
         if (value instanceof List) {
-            List<?> values = ((List<?>) value);
+            List<?> values = (List<?>) value;
 
             if (values.isEmpty()) {
                 return AttributeValue.builder().ss().build();
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java
new file mode 100644
index 00000000..6065ebd1
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format.converter.aws2.s3;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.camel.CamelExecutionException;
+import org.apache.camel.Exchange;
+import org.apache.camel.InvalidPayloadException;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
+import software.amazon.awssdk.utils.IoUtils;
+
+/**
+ * Binary output type.
+ */
+@DataType(scheme = "aws2-s3", name = "binary")
+public class AWS2S3BinaryOutputType implements DataTypeConverter {
+
+    @Override
+    public void convert(Exchange exchange) {
+        if (exchange.getMessage().getBody() instanceof byte[]) {
+            return;
+        }
+
+        try {
+            InputStream is = exchange.getMessage().getBody(InputStream.class);
+            if (is != null) {
+                exchange.getMessage().setBody(IoUtils.toByteArray(is));
+                return;
+            }
+
+            // Use default Camel converter utils to convert body to byte[]
+            exchange.getMessage().setBody(exchange.getMessage().getMandatoryBody(byte[].class));
+        } catch (IOException | InvalidPayloadException e) {
+            throw new CamelExecutionException("Failed to convert AWS S3 body to byte[]", exchange, e);
+        }
+    }
+}
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputType.java
new file mode 100644
index 00000000..74736d67
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputType.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format.converter.aws2.s3;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+
+import org.apache.camel.CamelExecutionException;
+import org.apache.camel.Exchange;
+import org.apache.camel.component.aws2.s3.AWS2S3Constants;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
+import software.amazon.awssdk.core.ResponseInputStream;
+import software.amazon.awssdk.utils.IoUtils;
+
+/**
+ * Json output data type represents file name as key and file content as Json structure.
+ * <p/>
+ * Example Json structure: { "key": "myFile.txt", "content": "Hello" }
+ */
+@DataType(scheme = "aws2-s3", name = "json")
+public class AWS2S3JsonOutputType implements DataTypeConverter {
+
+    private static final String TEMPLATE = "{" +
+            "\"key\": \"%s\", " +
+            "\"content\": \"%s\"" +
+            "}";
+
+    @Override
+    public void convert(Exchange exchange) {
+        String key = exchange.getMessage().getHeader(AWS2S3Constants.KEY, String.class);
+
+        ResponseInputStream<?> bodyInputStream = exchange.getMessage().getBody(ResponseInputStream.class);
+        if (bodyInputStream != null) {
+            try {
+                exchange.getMessage().setBody(String.format(TEMPLATE, key, IoUtils.toUtf8String(bodyInputStream)));
+                return;
+            } catch (IOException e) {
+                throw new CamelExecutionException("Failed to convert AWS S3 body to Json", exchange, e);
+            }
+        }
+
+        byte[] bodyContent = exchange.getMessage().getBody(byte[].class);
+        if (bodyContent != null) {
+            exchange.getMessage().setBody(String.format(TEMPLATE, key, new String(bodyContent, StandardCharsets.UTF_8)));
+        }
+    }
+}
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java
new file mode 100644
index 00000000..047e6dd5
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format.converter.standard;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.camel.CamelExecutionException;
+import org.apache.camel.Exchange;
+import org.apache.camel.InvalidPayloadException;
+import org.apache.camel.component.jackson.JacksonDataFormat;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
+
+/**
+ * Data type converter able to unmarshal to given unmarshalType using jackson data format.
+ * <p/>
+ * Unmarshal type should be given as a fully qualified class name in the exchange properties.
+ */
+@DataType(name = "jsonObject")
+public class JsonModelDataType implements DataTypeConverter {
+
+    public static final String JSON_DATA_TYPE_KEY = "CamelJsonModelDataType";
+
+    @Override
+    public void convert(Exchange exchange) {
+        if (!exchange.hasProperties() || !exchange.getProperties().containsKey(JSON_DATA_TYPE_KEY)) {
+            return;
+        }
+
+        String type = exchange.getProperty(JSON_DATA_TYPE_KEY, String.class);
+        try (JacksonDataFormat dataFormat = new JacksonDataFormat(new ObjectMapper(), Class.forName(type))) {
+            Object unmarshalled = dataFormat.unmarshal(exchange, getBodyAsStream(exchange));
+            exchange.getMessage().setBody(unmarshalled);
+        } catch (Exception e) {
+            throw new CamelExecutionException(
+                    String.format("Failed to load Json unmarshalling type '%s'", type), exchange, e);
+        }
+    }
+
+    private InputStream getBodyAsStream(Exchange exchange) throws InvalidPayloadException {
+        InputStream bodyStream = exchange.getMessage().getBody(InputStream.class);
+
+        if (bodyStream == null) {
+            bodyStream = new ByteArrayInputStream(exchange.getMessage().getMandatoryBody(byte[].class));
+        }
+
+        return bodyStream;
+    }
+}
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java
new file mode 100644
index 00000000..d39d30f8
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format.spi;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.kamelets.utils.format.spi.annotations.DataType;
+
+@FunctionalInterface
+public interface DataTypeConverter {
+
+    void convert(Exchange exchange);
+
+    /**
+     * Gets the data type converter name. By default the name is derived from the {@code @DataType} annotation, if present.
+     * @return
+     */
+    default String getName() {
+        if (this.getClass().isAnnotationPresent(DataType.class)) {
+            return this.getClass().getAnnotation(DataType.class).name();
+        }
+
+        throw new UnsupportedOperationException("Missing data type converter name");
+    }
+}
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeLoader.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeLoader.java
new file mode 100644
index 00000000..73f87c69
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeLoader.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format.spi;
+
+/**
+ * A pluggable strategy to load data types into a {@link DataTypeRegistry}.
+ */
+public interface DataTypeLoader {
+
+    /**
+     * A pluggable strategy to load data types into a registry.
+     *
+     * @param  registry the registry to load the data types into
+     */
+    void load(DataTypeRegistry registry);
+}
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeRegistry.java
new file mode 100644
index 00000000..cb2bedc9
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeRegistry.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format.spi;
+
+import java.util.Optional;
+
+/**
+ * Registry for data types. Data type loaders should be used to add types to the registry.
+ * <p/>
+ * The registry is able to perform a lookup of a specific data type.
+ */
+public interface DataTypeRegistry {
+
+    /**
+     * Registers a new data type converter for the given component scheme.
+     * @param scheme
+     * @param converter
+     */
+    void addDataTypeConverter(String scheme, DataTypeConverter converter);
+
+    /**
+     * Registers a new default data type converter.
+     * @param converter
+     */
+    default void addDataTypeConverter(DataTypeConverter converter) {
+        addDataTypeConverter("camel", converter);
+    }
+
+    /**
+     * Find data type for given component scheme and data type name.
+     * @param scheme
+     * @param name
+     * @return
+     */
+    Optional<DataTypeConverter> lookup(String scheme, String name);
+
+    /**
+     * Find data type for given data type name.
+     * @param name
+     * @return
+     */
+    default Optional<DataTypeConverter> lookup(String name) {
+        return lookup("camel", name);
+    }
+}
diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java
new file mode 100644
index 00000000..b1d4f5a9
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format.spi.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Data type annotation defines a type with its component scheme, a name and input/output types.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Documented
+@Target({ ElementType.TYPE })
+public @interface DataType {
+
+    /**
+     * Camel component scheme.
+     * @return
+     */
+    String scheme() default "camel";
+
+    /**
+     * Data type name.
+     * @return
+     */
+    String name();
+
+    /**
+     * The media type associated with this data type.
+     * @return
+     */
+    String mediaType() default "";
+}
diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType
new file mode 100644
index 00000000..b51d3404
--- /dev/null
+++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+org.apache.camel.kamelets.utils.format.converter.standard
+org.apache.camel.kamelets.utils.format.converter.aws2.ddb
+org.apache.camel.kamelets.utils.format.converter.aws2.s3
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java
new file mode 100644
index 00000000..2ee4113e
--- /dev/null
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format;
+
+import java.util.Optional;
+
+import org.apache.camel.CamelContextAware;
+import org.apache.camel.impl.DefaultCamelContext;
+import org.apache.camel.kamelets.utils.format.converter.standard.JsonModelDataType;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+class DefaultDataTypeRegistryTest {
+
+    private DefaultCamelContext camelContext;
+
+    private DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry();
+
+    @BeforeEach
+    void setup() {
+        this.camelContext = new DefaultCamelContext();
+        CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext);
+    }
+
+    @Test
+    public void shouldLookupDefaultDataTypeConverters() throws Exception {
+        Optional<DataTypeConverter> converter = dataTypeRegistry.lookup( "jsonObject");
+        Assertions.assertTrue(converter.isPresent());
+        Assertions.assertEquals(JsonModelDataType.class, converter.get().getClass());
+        converter = dataTypeRegistry.lookup( "string");
+        Assertions.assertTrue(converter.isPresent());
+        Assertions.assertEquals(DefaultDataTypeConverter.class, converter.get().getClass());
+        Assertions.assertEquals(String.class, ((DefaultDataTypeConverter) converter.get()).getType());
+        converter = dataTypeRegistry.lookup( "binary");
+        Assertions.assertTrue(converter.isPresent());
+        Assertions.assertEquals(DefaultDataTypeConverter.class, converter.get().getClass());
+        Assertions.assertEquals(byte[].class, ((DefaultDataTypeConverter) converter.get()).getType());
+    }
+
+}
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverterTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputTypeTest.java
similarity index 65%
rename from library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverterTest.java
rename to library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputTypeTest.java
index 33d27bfe..7f1f9e9f 100644
--- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverterTest.java
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputTypeTest.java
@@ -14,16 +14,21 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.camel.kamelets.utils.transform.aws.ddb;
+
+package org.apache.camel.kamelets.utils.format.converter.aws2.ddb;
 
 import java.util.Map;
+import java.util.Optional;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.camel.CamelContextAware;
+import org.apache.camel.CamelExecutionException;
 import org.apache.camel.Exchange;
-import org.apache.camel.InvalidPayloadException;
 import org.apache.camel.component.aws2.ddb.Ddb2Constants;
 import org.apache.camel.component.aws2.ddb.Ddb2Operations;
 import org.apache.camel.impl.DefaultCamelContext;
+import org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
 import org.apache.camel.support.DefaultExchange;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
@@ -33,25 +38,25 @@ import software.amazon.awssdk.services.dynamodb.model.AttributeValue;
 import software.amazon.awssdk.services.dynamodb.model.AttributeValueUpdate;
 import software.amazon.awssdk.services.dynamodb.model.ReturnValue;
 
-class JsonToDdbModelConverterTest {
+public class Ddb2JsonInputTypeTest {
 
     private DefaultCamelContext camelContext;
 
     private final ObjectMapper mapper = new ObjectMapper();
 
-    private final JsonToDdbModelConverter processor = new JsonToDdbModelConverter();
+    private final Ddb2JsonInputType inputType = new Ddb2JsonInputType();
 
     private final String keyJson = "{" +
-                "\"name\": \"Rajesh Koothrappali\"" +
+            "\"name\": \"Rajesh Koothrappali\"" +
             "}";
 
     private final String itemJson = "{" +
-                "\"name\": \"Rajesh Koothrappali\"," +
-                "\"age\": 29," +
-                "\"super-heroes\": [\"batman\", \"spiderman\", \"wonderwoman\"]," +
-                "\"issues\": [5, 3, 9, 1]," +
-                "\"girlfriend\": null," +
-                "\"doctorate\": true" +
+            "\"name\": \"Rajesh Koothrappali\"," +
+            "\"age\": 29," +
+            "\"super-heroes\": [\"batman\", \"spiderman\", \"wonderwoman\"]," +
+            "\"issues\": [5, 3, 9, 1]," +
+            "\"girlfriend\": null," +
+            "\"doctorate\": true" +
             "}";
 
     @BeforeEach
@@ -65,8 +70,8 @@ class JsonToDdbModelConverterTest {
         Exchange exchange = new DefaultExchange(camelContext);
 
         exchange.getMessage().setBody(mapper.readTree(itemJson));
-
-        processor.process(Ddb2Operations.PutItem.name(), exchange);
+        exchange.setProperty("operation", Ddb2Operations.PutItem.name());
+        inputType.convert(exchange);
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
         Assertions.assertEquals(Ddb2Operations.PutItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION));
@@ -80,9 +85,10 @@ class JsonToDdbModelConverterTest {
     void shouldMapUpdateItemHeaders() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);
 
-        exchange.getMessage().setBody(mapper.readTree("{\"key\": " + keyJson + ", \"item\": " + itemJson + "}"));
+        exchange.getMessage().setBody(mapper.readTree("{\"operation\": \"" + Ddb2Operations.UpdateItem.name() + "\", \"key\": "
+                + keyJson + ", \"item\": " + itemJson + "}"));
 
-        processor.process(Ddb2Operations.UpdateItem.name(), exchange);
+        inputType.convert(exchange);
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
         Assertions.assertEquals(Ddb2Operations.UpdateItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION));
@@ -101,8 +107,9 @@ class JsonToDdbModelConverterTest {
         Exchange exchange = new DefaultExchange(camelContext);
 
         exchange.getMessage().setBody(mapper.readTree("{\"key\": " + keyJson + "}"));
+        exchange.setProperty("operation", Ddb2Operations.DeleteItem.name());
 
-        processor.process(Ddb2Operations.DeleteItem.name(), exchange);
+        inputType.convert(exchange);
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
         Assertions.assertEquals(Ddb2Operations.DeleteItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION));
@@ -119,8 +126,8 @@ class JsonToDdbModelConverterTest {
         Exchange exchange = new DefaultExchange(camelContext);
 
         exchange.getMessage().setBody(mapper.readTree("{\"user\":" + itemJson + "}"));
-
-        processor.process(Ddb2Operations.PutItem.name(), exchange);
+        exchange.setProperty("operation", Ddb2Operations.PutItem.name());
+        inputType.convert(exchange);
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
         Assertions.assertEquals(Ddb2Operations.PutItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION));
@@ -130,11 +137,12 @@ class JsonToDdbModelConverterTest {
         Assertions.assertEquals(1L, attributeValueMap.size());
 
         Assertions.assertEquals("AttributeValue(M={name=AttributeValue(S=Rajesh Koothrappali), " +
-                "age=AttributeValue(N=29), " +
-                "super-heroes=AttributeValue(SS=[batman, spiderman, wonderwoman]), " +
-                "issues=AttributeValue(NS=[5, 3, 9, 1]), " +
-                "girlfriend=AttributeValue(NUL=true), " +
-                "doctorate=AttributeValue(BOOL=true)})", attributeValueMap.get("user").toString());
+                        "age=AttributeValue(N=29), " +
+                        "super-heroes=AttributeValue(SS=[batman, spiderman, wonderwoman]), " +
+                        "issues=AttributeValue(NS=[5, 3, 9, 1]), " +
+                        "girlfriend=AttributeValue(NUL=true), " +
+                        "doctorate=AttributeValue(BOOL=true)})",
+                attributeValueMap.get("user").toString());
     }
 
     @Test
@@ -142,9 +150,10 @@ class JsonToDdbModelConverterTest {
     void shouldMapEmptyJson() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);
 
-        exchange.getMessage().setBody(mapper.readTree("{}"));
+        exchange.getMessage().setBody("{}");
+        exchange.getMessage().setHeader(Ddb2Constants.OPERATION, Ddb2Operations.PutItem.name());
 
-        processor.process(Ddb2Operations.PutItem.name(), exchange);
+        inputType.convert(exchange);
 
         Assertions.assertTrue(exchange.getMessage().hasHeaders());
         Assertions.assertEquals(Ddb2Operations.PutItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION));
@@ -154,20 +163,39 @@ class JsonToDdbModelConverterTest {
         Assertions.assertEquals(0L, attributeValueMap.size());
     }
 
-    @Test()
+    @Test
+    void shouldFailForWrongBodyType() throws Exception {
+        Exchange exchange = new DefaultExchange(camelContext);
+
+        exchange.getMessage().setBody("Hello");
+
+        Assertions.assertThrows(CamelExecutionException.class, () -> inputType.convert(exchange));
+    }
+
+    @Test
     void shouldFailForUnsupportedOperation() throws Exception {
         Exchange exchange = new DefaultExchange(camelContext);
 
         exchange.getMessage().setBody(mapper.readTree("{}"));
+        exchange.setProperty("operation", Ddb2Operations.BatchGetItems.name());
 
-        Assertions.assertThrows(UnsupportedOperationException.class, () -> processor.process(Ddb2Operations.BatchGetItems.name(), exchange));
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> inputType.convert(exchange));
+    }
+
+    @Test
+    public void shouldLookupDataType() throws Exception {
+        DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry();
+        CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext);
+        Optional<DataTypeConverter> converter = dataTypeRegistry.lookup("aws2-ddb", "json");
+        Assertions.assertTrue(converter.isPresent());
     }
 
     private void assertAttributeValueMap(Map<String, AttributeValue> attributeValueMap) {
         Assertions.assertEquals(6L, attributeValueMap.size());
         Assertions.assertEquals(AttributeValue.builder().s("Rajesh Koothrappali").build(), attributeValueMap.get("name"));
         Assertions.assertEquals(AttributeValue.builder().n("29").build(), attributeValueMap.get("age"));
-        Assertions.assertEquals(AttributeValue.builder().ss("batman", "spiderman", "wonderwoman").build(), attributeValueMap.get("super-heroes"));
+        Assertions.assertEquals(AttributeValue.builder().ss("batman", "spiderman", "wonderwoman").build(),
+                attributeValueMap.get("super-heroes"));
         Assertions.assertEquals(AttributeValue.builder().ns("5", "3", "9", "1").build(), attributeValueMap.get("issues"));
         Assertions.assertEquals(AttributeValue.builder().nul(true).build(), attributeValueMap.get("girlfriend"));
         Assertions.assertEquals(AttributeValue.builder().bool(true).build(), attributeValueMap.get("doctorate"));
@@ -175,11 +203,19 @@ class JsonToDdbModelConverterTest {
 
     private void assertAttributeValueUpdateMap(Map<String, AttributeValueUpdate> attributeValueMap) {
         Assertions.assertEquals(6L, attributeValueMap.size());
-        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().s("Rajesh Koothrappali").build()).action(AttributeAction.PUT).build(), attributeValueMap.get("name"));
-        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().n("29").build()).action(AttributeAction.PUT).build(), attributeValueMap.get("age"));
-        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().ss("batman", "spiderman", "wonderwoman").build()).action(AttributeAction.PUT).build(), attributeValueMap.get("super-heroes"));
-        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().ns("5", "3", "9", "1").build()).action(AttributeAction.PUT).build(), attributeValueMap.get("issues"));
-        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().nul(true).build()).action(AttributeAction.PUT).build(), attributeValueMap.get("girlfriend"));
-        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().bool(true).build()).action(AttributeAction.PUT).build(), attributeValueMap.get("doctorate"));
+        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().s("Rajesh Koothrappali").build())
+                .action(AttributeAction.PUT).build(), attributeValueMap.get("name"));
+        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().n("29").build())
+                .action(AttributeAction.PUT).build(), attributeValueMap.get("age"));
+        Assertions.assertEquals(
+                AttributeValueUpdate.builder().value(AttributeValue.builder().ss("batman", "spiderman", "wonderwoman").build())
+                        .action(AttributeAction.PUT).build(),
+                attributeValueMap.get("super-heroes"));
+        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().ns("5", "3", "9", "1").build())
+                .action(AttributeAction.PUT).build(), attributeValueMap.get("issues"));
+        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().nul(true).build())
+                .action(AttributeAction.PUT).build(), attributeValueMap.get("girlfriend"));
+        Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().bool(true).build())
+                .action(AttributeAction.PUT).build(), attributeValueMap.get("doctorate"));
     }
 }
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputTypeTest.java
new file mode 100644
index 00000000..53357add
--- /dev/null
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputTypeTest.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.kamelets.utils.format.converter.aws2.s3;
+
+import java.io.ByteArrayInputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.Optional;
+
+import org.apache.camel.CamelContextAware;
+import org.apache.camel.Exchange;
+import org.apache.camel.component.aws2.s3.AWS2S3Constants;
+import org.apache.camel.impl.DefaultCamelContext;
+import org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.apache.camel.support.DefaultExchange;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+import software.amazon.awssdk.core.ResponseInputStream;
+import software.amazon.awssdk.http.AbortableInputStream;
+import software.amazon.awssdk.services.s3.model.GetObjectRequest;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class AWS2S3JsonOutputTypeTest {
+
+    private final DefaultCamelContext camelContext = new DefaultCamelContext();
+
+    private final AWS2S3JsonOutputType outputType = new AWS2S3JsonOutputType();
+
+    @Test
+    void shouldMapFromStringToJsonModel() throws Exception {
+        Exchange exchange = new DefaultExchange(camelContext);
+
+        exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test1.txt");
+        exchange.getMessage().setBody("Test1");
+        outputType.convert(exchange);
+
+        Assertions.assertTrue(exchange.getMessage().hasHeaders());
+        assertEquals("test1.txt", exchange.getMessage().getHeader(AWS2S3Constants.KEY));
+
+        assertJsonModelBody(exchange, "test1.txt", "Test1");
+    }
+
+    @Test
+    void shouldMapFromBytesToJsonModel() throws Exception {
+        Exchange exchange = new DefaultExchange(camelContext);
+
+        exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test2.txt");
+        exchange.getMessage().setBody("Test2".getBytes(StandardCharsets.UTF_8));
+        outputType.convert(exchange);
+
+        Assertions.assertTrue(exchange.getMessage().hasHeaders());
+        assertEquals("test2.txt", exchange.getMessage().getHeader(AWS2S3Constants.KEY));
+
+        assertJsonModelBody(exchange, "test2.txt", "Test2");
+    }
+
+    @Test
+    void shouldMapFromInputStreamToJsonModel() throws Exception {
+        Exchange exchange = new DefaultExchange(camelContext);
+
+        exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test3.txt");
+        exchange.getMessage().setBody(new ResponseInputStream<>(GetObjectRequest.builder().bucket("myBucket").key("test3.txt").build(),
+                AbortableInputStream.create(new ByteArrayInputStream("Test3".getBytes(StandardCharsets.UTF_8)))));
+        outputType.convert(exchange);
+
+        Assertions.assertTrue(exchange.getMessage().hasHeaders());
+        assertEquals("test3.txt", exchange.getMessage().getHeader(AWS2S3Constants.KEY));
+
+        assertJsonModelBody(exchange, "test3.txt", "Test3");
+    }
+
+    @Test
+    public void shouldLookupDataType() throws Exception {
+        DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry();
+        CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext);
+        Optional<DataTypeConverter> converter = dataTypeRegistry.lookup("aws2-s3", "json");
+        Assertions.assertTrue(converter.isPresent());
+    }
+
+    private static void assertJsonModelBody(Exchange exchange, String key, String content) {
+        assertEquals(String.format("{\"key\": \"%s\", \"content\": \"%s\"}", key, content), exchange.getMessage().getBody());
+    }
+}
diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java
new file mode 100644
index 00000000..c175cc6d
--- /dev/null
+++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.kamelets.utils.format.converter.standard;
+
+import java.util.Optional;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.camel.CamelContextAware;
+import org.apache.camel.Exchange;
+import org.apache.camel.impl.DefaultCamelContext;
+import org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry;
+import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter;
+import org.apache.camel.support.DefaultExchange;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class JsonModelDataTypeTest {
+
+    private final DefaultCamelContext camelContext = new DefaultCamelContext();
+
+    private final JsonModelDataType dataType = new JsonModelDataType();
+
+    @Test
+    void shouldMapFromStringToJsonModel() throws Exception {
+        Exchange exchange = new DefaultExchange(camelContext);
+
+        exchange.setProperty(JsonModelDataType.JSON_DATA_TYPE_KEY, Person.class.getName());
+        exchange.getMessage().setBody("{ \"name\": \"Sheldon\", \"age\": 29}");
+        dataType.convert(exchange);
+
+        assertEquals(Person.class, exchange.getMessage().getBody().getClass());
+        assertEquals("Sheldon", exchange.getMessage().getBody(Person.class).getName());
+    }
+
+    @Test
+    public void shouldLookupDataType() throws Exception {
+        DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry();
+        CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext);
+        Optional<DataTypeConverter> converter = dataTypeRegistry.lookup("jsonObject");
+        Assertions.assertTrue(converter.isPresent());
+    }
+
+    public static class Person {
+        @JsonProperty
+        private String name;
+
+        @JsonProperty
+        private Long age;
+
+        public String getName() {
+            return name;
+        }
+
+        public void setName(String name) {
+            this.name = name;
+        }
+
+        public Long getAge() {
+            return age;
+        }
+
+        public void setAge(Long age) {
+            this.age = age;
+        }
+    }
+
+}
\ No newline at end of file
diff --git a/library/camel-kamelets-utils/src/test/resources/log4j2-test.xml b/library/camel-kamelets-utils/src/test/resources/log4j2-test.xml
new file mode 100644
index 00000000..1d6d8f38
--- /dev/null
+++ b/library/camel-kamelets-utils/src/test/resources/log4j2-test.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~      http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+<Configuration status="INFO">
+  <Appenders>
+    <Console name="STDOUT" target="SYSTEM_OUT">
+      <PatternLayout pattern="%-5level| %msg%n"/>
+    </Console>
+    <Null name="NONE"/>
+  </Appenders>
+
+  <Loggers>
+    <Root level="INFO">
+      <AppenderRef ref="STDOUT"/>
+    </Root>
+  </Loggers>
+
+</Configuration>
diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml
index 5b603abf..ba200347 100644
--- a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml
+++ b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml
@@ -97,6 +97,12 @@ spec:
         x-descriptors:
           - 'urn:alm:descriptor:com.tectonic.ui:checkbox'
         default: false
+      inputFormat:
+        title: Input Type
+        description: Specify the input type for this Kamelet. The Kamelet will automatically apply conversion logic in order to transform message content to this data type.
+        type: string
+        default: json
+        example: json
   types:
     in:
       mediaType: application/json
@@ -107,17 +113,24 @@ spec:
   - "camel:aws2-ddb"
   - "camel:kamelet"
   template:
+    beans:
+    - name: dataTypeRegistry
+      type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry"
+    - name: inputTypeProcessor
+      type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor"
+      property:
+        - key: scheme
+          value: 'aws2-ddb'
+        - key: format
+          value: '{{inputFormat}}'
     from:
       uri: "kamelet:source"
       steps:
       - set-property:
-          name: operation
-          constant: "{{operation}}"
-      - unmarshal:
-          json:
-            library: Jackson
-            unmarshalType: com.fasterxml.jackson.databind.JsonNode
-      - bean: "org.apache.camel.kamelets.utils.transform.aws.ddb.JsonToDdbModelConverter"
+        name: operation
+        constant: "{{operation}}"
+      - process:
+          ref: "{{inputTypeProcessor}}"
       - to:
           uri: "aws2-ddb:{{table}}"
           parameters:
diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml
index 6ab2bca4..e09cf4aa 100644
--- a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml
+++ b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml
@@ -107,6 +107,12 @@ spec:
         description: The number of milliseconds before the next poll of the selected bucket.
         type: integer
         default: 500
+      outputFormat:
+        title: Output Type
+        description: Choose the output type for this Kamelet. The Kamelet supports different output types and performs automatic message conversion according to this data type.
+        type: string
+        default: binary
+        example: binary
   dependencies:
     - "camel:core"
     - "camel:aws2-s3"
@@ -114,6 +120,15 @@ spec:
     - "camel:kamelet"
   template:
     beans:
+      - name: dataTypeRegistry
+        type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry"
+      - name: outputTypeProcessor
+        type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor"
+        property:
+          - key: scheme
+            value: 'aws2-s3'
+          - key: format
+            value: '{{outputFormat}}'
       - name: renameHeaders
         type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders"
         property:
@@ -143,4 +158,6 @@ spec:
       steps:
       - process:
           ref: "{{renameHeaders}}"
+      - process:
+          ref: "{{outputTypeProcessor}}"
       - to: "kamelet:sink"
diff --git a/test/aws-s3/README.md b/test/aws-s3/README.md
new file mode 100644
index 00000000..6e7d7315
--- /dev/null
+++ b/test/aws-s3/README.md
@@ -0,0 +1,76 @@
+# AWS S3 Kamelet test
+
+This test verifies the AWS S3 Kamelet source defined in [aws-s3-source.kamelet.yaml](aws-s3-source.kamelet.yaml)
+
+## Objectives
+
+The test verifies the AWS S3 Kamelet source by creating a Camel K integration that uses the Kamelet and listens for messages on the
+AWS S3 bucket.
+
+The test uses a [LocalStack Testcontainers](https://www.testcontainers.org/modules/localstack/) instance to start a local AWS S3 service for mocking reasons.
+The Kamelet and the test interact with the local AWS S3 service for validation of functionality.
+
+### Test Kamelet source
+
+The test performs the following high level steps for configs - URI, secret and property based:
+
+*Preparation*
+- Start the AWS S3 service as LocalStack container
+- Overwrite the Kamelet with the latest source
+- Prepare the Camel AWS S3 client
+
+*Scenario* 
+- Create the Kamelet in the current namespace in the cluster
+- Create the Camel K integration that uses the Kamelet source to consume data from AWS S3 service
+- Wait for the Camel K integration to start and listen for AWS S3 messages
+- Create a new message in the AWS S3 bucket
+- Verify that the integration has received the message event
+
+*Cleanup*
+- Stop the LocalStack container
+- Delete the Camel K integration
+- Delete the secret from the current namespace
+
+## Installation
+
+The test assumes that you have access to a Kubernetes cluster and that the Camel K operator as well as the YAKS operator is installed
+and running.
+
+You can review the installation steps for the operators in the documentation:
+
+- [Install Camel K operator](https://camel.apache.org/camel-k/latest/installation/installation.html)
+- [Install YAKS operator](https://github.com/citrusframework/yaks#installation)
+
+## Run the tests
+
+To run tests with URI based configuration: 
+
+```shell script
+$ yaks test aws-s3-source-uri-conf.feature
+```
+
+To run tests with secret based configuration:
+
+```shell script
+$ yaks test aws-s3-source-secret-conf.feature
+```
+
+To run tests with property based configuration:
+
+```shell script
+$ yaks test aws-s3-source-property-conf.feature
+```
+
+To run tests with URI binding:
+
+```shell script
+$ yaks test aws-s3-uri-binding.feature
+```
+
+To run tests with binding to Knative channel:
+
+```shell script
+$ yaks test aws-s3-inmem-binding.feature
+```
+
+You will be provided with the test log output and the test results.
diff --git a/test/aws-s3/amazonS3Client.groovy b/test/aws-s3/amazonS3Client.groovy
new file mode 100644
index 00000000..5c3ff8a0
--- /dev/null
+++ b/test/aws-s3/amazonS3Client.groovy
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider
+import software.amazon.awssdk.regions.Region
+import software.amazon.awssdk.services.s3.S3Client
+
+S3Client s3 = S3Client
+        .builder()
+        .endpointOverride(URI.create("${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL}"))
+        .credentialsProvider(StaticCredentialsProvider.create(
+                AwsBasicCredentials.create(
+                        "${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}",
+                        "${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}")
+        ))
+        .region(Region.of("${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}"))
+        .build()
+
+s3.createBucket(b -> b.bucket("${aws.s3.bucketNameOrArn}"))
+
+return s3
diff --git a/test/aws-s3/aws-s3-credentials.properties b/test/aws-s3/aws-s3-credentials.properties
new file mode 100644
index 00000000..f9dd1e10
--- /dev/null
+++ b/test/aws-s3/aws-s3-credentials.properties
@@ -0,0 +1,7 @@
+# Please add your AWS S3 account credentials
+camel.kamelet.aws-s3-source.aws-s3-credentials.bucketNameOrArn=${aws.s3.bucketNameOrArn}
+camel.kamelet.aws-s3-source.aws-s3-credentials.overrideEndpoint=true
+camel.kamelet.aws-s3-source.aws-s3-credentials.uriEndpointOverride=${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL}
+camel.kamelet.aws-s3-source.aws-s3-credentials.secretKey=${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}
+camel.kamelet.aws-s3-source.aws-s3-credentials.accessKey=${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}
+camel.kamelet.aws-s3-source.aws-s3-credentials.region=${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}
diff --git a/test/aws-s3/aws-s3-inmem-binding.feature b/test/aws-s3/aws-s3-inmem-binding.feature
new file mode 100644
index 00000000..d67e7798
--- /dev/null
+++ b/test/aws-s3/aws-s3-inmem-binding.feature
@@ -0,0 +1,49 @@
+@knative
+Feature: AWS S3 Kamelet - binding to InMemoryChannel
+
+  Background:
+    Given Kamelet aws-s3-source is available
+    Given variables
+      | aws.s3.bucketNameOrArn | mybucket |
+      | aws.s3.message | Hello from S3 Kamelet |
+      | aws.s3.key | hello.txt |
+
+  Scenario: Start LocalStack container
+    Given Enable service S3
+    Given start LocalStack container
+    And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}'
+
+  Scenario: Create AWS-S3 client
+    Given New global Camel context
+    Given load to Camel registry amazonS3Client.groovy
+
+  Scenario: Create Knative broker and channel
+    Given create Knative broker default
+    And Knative broker default is running
+    Given create Knative channel messages
+
+  Scenario: Create AWS-S3 Kamelet to InMemoryChannel binding
+    Given variable loginfo is "Installed features"
+    Given load KameletBinding aws-s3-to-inmem.yaml
+    Given load KameletBinding inmem-to-log.yaml
+    Then KameletBinding aws-s3-to-inmem should be available
+    And KameletBinding inmem-to-log should be available
+    And Camel K integration aws-s3-to-inmem is running
+    And Camel K integration inmem-to-log is running
+    And Camel K integration aws-s3-to-inmem should print ${loginfo}
+    And Camel K integration inmem-to-log should print ${loginfo}
+    Then sleep 10000 ms
+
+  Scenario: Verify Kamelet source
+    Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}"
+    Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message}
+    Then Camel K integration inmem-to-log should print ${aws.s3.message}
+
+  Scenario: Remove resources
+    Given delete KameletBinding aws-s3-to-inmem
+    Given delete KameletBinding inmem-to-log
+    Given delete Knative broker default
+    Given delete Knative channel messages
+
+  Scenario: Stop container
+    Given stop LocalStack container
diff --git a/test/aws-s3/aws-s3-source-property-conf.feature b/test/aws-s3/aws-s3-source-property-conf.feature
new file mode 100644
index 00000000..93a2d353
--- /dev/null
+++ b/test/aws-s3/aws-s3-source-property-conf.feature
@@ -0,0 +1,37 @@
+Feature: AWS S3 Kamelet - property based config
+
+  Background:
+    Given Kamelet aws-s3-source is available
+    Given variables
+      | aws.s3.bucketNameOrArn | mybucket |
+      | aws.s3.message | Hello from S3 Kamelet |
+      | aws.s3.key | hello.txt |
+
+  Scenario: Start LocalStack container
+    Given Enable service S3
+    Given start LocalStack container
+    And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}'
+
+  Scenario: Create AWS-S3 client
+    Given New global Camel context
+    Given load to Camel registry amazonS3Client.groovy
+
+  Scenario: Create AWS-S3 Kamelet to log binding
+    Given Camel K integration property file aws-s3-credentials.properties
+    Given create Camel K integration aws-s3-to-log-prop-based.groovy
+    """
+    from("kamelet:aws-s3-source/aws-s3-credentials")
+      .to("log:info")
+    """
+    Then Camel K integration aws-s3-to-log-prop-based should be running
+
+  Scenario: Verify Kamelet source
+    Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}"
+    Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message}
+    Then Camel K integration aws-s3-to-log-prop-based should print ${aws.s3.message}
+
+  Scenario: Remove Camel K resources
+    Given delete Camel K integration aws-s3-to-log-prop-based
+
+  Scenario: Stop container
+    Given stop LocalStack container
diff --git a/test/aws-s3/aws-s3-source-secret-conf.feature b/test/aws-s3/aws-s3-source-secret-conf.feature
new file mode 100644
index 00000000..78ee9be5
--- /dev/null
+++ b/test/aws-s3/aws-s3-source-secret-conf.feature
@@ -0,0 +1,39 @@
+@ignored
+Feature: AWS S3 Kamelet - secret based config
+
+  Background:
+    Given Kamelet aws-s3-source is available
+    Given variables
+      | aws.s3.bucketNameOrArn | mybucket |
+      | aws.s3.message | Hello from S3 Kamelet |
+      | aws.s3.key | hello.txt |
+
+  Scenario: Start LocalStack container
+    Given Enable service S3
+    Given start LocalStack container
+    And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}'
+
+  Scenario: Create AWS-S3 client
+    Given New global Camel context
+    Given load to Camel registry amazonS3Client.groovy
+
+  Scenario: Create AWS-S3 Kamelet to log binding
+    Given create Kubernetes secret aws-s3-source-credentials
+      | aws-s3-credentials.properties | citrus:encodeBase64(citrus:readFile(aws-s3-credentials.properties)) |
+    Given create labels on Kubernetes secret aws-s3-source-credentials
+      | camel.apache.org/kamelet               | aws-s3-source |
+      | camel.apache.org/kamelet.configuration | aws-s3-credentials |
+    Given load Camel K integration aws-s3-to-log-secret-based.groovy
+    Then Camel K integration aws-s3-to-log-secret-based should be running
+
+  Scenario: Verify Kamelet source
+    Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}"
+    Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message}
+    Then Camel K integration aws-s3-to-log-secret-based should print ${aws.s3.message}
+
+  Scenario: Remove resources
+    Given delete Camel K integration aws-s3-to-log-secret-based
+    Given delete Kubernetes secret aws-s3-source-credentials
+
+  Scenario: Stop container
+    Given stop LocalStack container
diff --git a/test/aws-s3/aws-s3-source-uri-conf.feature b/test/aws-s3/aws-s3-source-uri-conf.feature
new file mode 100644
index 00000000..ca65ba7d
--- /dev/null
+++ b/test/aws-s3/aws-s3-source-uri-conf.feature
@@ -0,0 +1,32 @@
+Feature: AWS S3 Kamelet - URI based config
+
+  Background:
+    Given Kamelet aws-s3-source is available
+    Given variables
+      | aws.s3.bucketNameOrArn | mybucket |
+      | aws.s3.message | Hello from S3 Kamelet |
+      | aws.s3.key | hello.txt |
+
+  Scenario: Start LocalStack container
+    Given Enable service S3
+    Given start LocalStack container
+    And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}'
+
+  Scenario: Create S3 client
+    Given New global Camel context
+    Given load to Camel registry amazonS3Client.groovy
+
+  Scenario: Create AWS-S3 Kamelet to log binding
+    Given load Camel K integration aws-s3-to-log-uri-based.groovy
+    Then Camel K integration aws-s3-to-log-uri-based should be running
+
+  Scenario: Verify Kamelet source
+    Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}"
+    Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message}
+    Then Camel K integration aws-s3-to-log-uri-based should print ${aws.s3.message}
+
+  Scenario: Remove Camel K resources
+    Given delete Camel K integration aws-s3-to-log-uri-based
+
+  Scenario: Stop container
+    Given stop LocalStack container
diff --git a/test/aws-s3/aws-s3-to-inmem.yaml b/test/aws-s3/aws-s3-to-inmem.yaml
new file mode 100644
index 00000000..ce880028
--- /dev/null
+++ b/test/aws-s3/aws-s3-to-inmem.yaml
@@ -0,0 +1,39 @@
+# ---------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ---------------------------------------------------------------------------
+
+apiVersion: camel.apache.org/v1alpha1
+kind: KameletBinding
+metadata:
+  name: aws-s3-to-inmem
+spec:
+  source:
+    ref:
+      kind: Kamelet
+      apiVersion: camel.apache.org/v1alpha1
+      name: aws-s3-source
+    properties:
+      bucketNameOrArn: ${aws.s3.bucketNameOrArn}
+      overrideEndpoint: true
+      uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL}
+      accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}
+      secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}
+      region: ${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}
+  sink:
+    ref:
+      kind: InMemoryChannel
+      apiVersion: messaging.knative.dev/v1
+      name: messages
diff --git a/test/aws-s3/aws-s3-to-log-secret-based.groovy b/test/aws-s3/aws-s3-to-log-secret-based.groovy
new file mode 100644
index 00000000..02fb1c58
--- /dev/null
+++ b/test/aws-s3/aws-s3-to-log-secret-based.groovy
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// camel-k: language=groovy
+
+from("kamelet:aws-s3-source/aws-s3-credentials")
+  .to("log:info")
diff --git a/test/aws-s3/aws-s3-to-log-uri-based.groovy b/test/aws-s3/aws-s3-to-log-uri-based.groovy
new file mode 100644
index 00000000..145b5510
--- /dev/null
+++ b/test/aws-s3/aws-s3-to-log-uri-based.groovy
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// camel-k: language=groovy
+
+def parameters = 'bucketNameOrArn=${aws.s3.bucketNameOrArn}&'+
+                 'overrideEndpoint=true&' +
+                 'uriEndpointOverride=${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL}&' +
+                 'accessKey=${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}&' +
+                 'secretKey=${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}&'+
+                 'region=${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}&'+
+                 'deleteAfterRead=true'
+
+from("kamelet:aws-s3-source?$parameters")
+  .to("log:info")
diff --git a/test/aws-s3/aws-s3-uri-binding.feature b/test/aws-s3/aws-s3-uri-binding.feature
new file mode 100644
index 00000000..ace19177
--- /dev/null
+++ b/test/aws-s3/aws-s3-uri-binding.feature
@@ -0,0 +1,35 @@
+Feature: AWS S3 Kamelet - binding to URI
+
+  Background:
+    Given Kamelet aws-s3-source is available
+    Given variables
+      | aws.s3.bucketNameOrArn | mybucket |
+      | aws.s3.message | Hello from S3 Kamelet |
+      | aws.s3.key | hello.txt |
+
+  Scenario: Start LocalStack container
+    Given Enable service S3
+    Given start LocalStack container
+    And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}'
+
+  Scenario: Create AWS-S3 client
+    Given New global Camel context
+    Given load to Camel registry amazonS3Client.groovy
+
+  Scenario: Create AWS-S3 Kamelet to log binding
+    Given variable loginfo is "Installed features"
+    When load KameletBinding aws-s3-uri-binding.yaml
+    And KameletBinding aws-s3-uri-binding is available
+    And Camel K integration aws-s3-uri-binding is running
+    Then Camel K integration aws-s3-uri-binding should print ${loginfo}
+
+  Scenario: Verify Kamelet source
+    Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}"
+    Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message}
+    Then Camel K integration aws-s3-uri-binding should print ${aws.s3.message}
+
+  Scenario: Remove Camel K resources
+    Given delete KameletBinding aws-s3-uri-binding
+
+  Scenario: Stop container
+    Given stop LocalStack container
diff --git a/test/aws-s3/aws-s3-uri-binding.yaml b/test/aws-s3/aws-s3-uri-binding.yaml
new file mode 100644
index 00000000..50522818
--- /dev/null
+++ b/test/aws-s3/aws-s3-uri-binding.yaml
@@ -0,0 +1,37 @@
+# ---------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ---------------------------------------------------------------------------
+
+apiVersion: camel.apache.org/v1alpha1
+kind: KameletBinding
+metadata:
+  name: aws-s3-uri-binding
+spec:
+  source:
+    ref:
+      kind: Kamelet
+      apiVersion: camel.apache.org/v1alpha1
+      name: aws-s3-source
+    properties:
+      bucketNameOrArn: ${aws.s3.bucketNameOrArn}
+      overrideEndpoint: true
+      outputFormat: json
+      uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL}
+      accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}
+      secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}
+      region: ${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}
+  sink:
+    uri: log:info
diff --git a/test/aws-s3/yaks-config.yaml b/test/aws-s3/yaks-config.yaml
new file mode 100644
index 00000000..f36d136c
--- /dev/null
+++ b/test/aws-s3/yaks-config.yaml
@@ -0,0 +1,65 @@
+# ---------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ---------------------------------------------------------------------------
+
+config:
+  namespace:
+    temporary: false
+  runtime:
+    testcontainers:
+      enabled: true
+    env:
+      - name: YAKS_CAMEL_AUTO_REMOVE_RESOURCES
+        value: false
+      - name: YAKS_CAMELK_AUTO_REMOVE_RESOURCES
+        value: false
+      - name: YAKS_KAMELETS_AUTO_REMOVE_RESOURCES
+        value: false
+      - name: YAKS_KUBERNETES_AUTO_REMOVE_RESOURCES
+        value: false
+      - name: YAKS_KNATIVE_AUTO_REMOVE_RESOURCES
+        value: false
+      - name: YAKS_TESTCONTAINERS_AUTO_REMOVE_RESOURCES
+        value: false
+      - name: CITRUS_TYPE_CONVERTER
+        value: camel
+    resources:
+      - amazonS3Client.groovy
+      - aws-s3-credentials.properties
+      - aws-s3-to-log-uri-based.groovy
+      - aws-s3-to-log-secret-based.groovy
+      - aws-s3-uri-binding.yaml
+      - aws-s3-to-inmem.yaml
+      - ../utils/inmem-to-log.yaml
+    cucumber:
+      tags:
+        - "not @ignored"
+    settings:
+      dependencies:
+        - groupId: com.amazonaws
+          artifactId: aws-java-sdk-kinesis
+          version: "@aws-java-sdk.version@"
+        - groupId: org.apache.camel
+          artifactId: camel-aws2-s3
+          version: "@camel.version@"
+        - groupId: org.apache.camel
+          artifactId: camel-jackson
+          version: "@camel.version@"
+  dump:
+    enabled: true
+    failedOnly: true
+    includes:
+      - app=camel-k
diff --git a/test/utils/inmem-to-log.yaml b/test/utils/inmem-to-log.yaml
new file mode 100644
index 00000000..8b5dc51e
--- /dev/null
+++ b/test/utils/inmem-to-log.yaml
@@ -0,0 +1,29 @@
+# ---------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ---------------------------------------------------------------------------
+
+apiVersion: camel.apache.org/v1alpha1
+kind: KameletBinding
+metadata:
+  name: inmem-to-log
+spec:
+  source:
+    ref:
+      kind: InMemoryChannel
+      apiVersion: messaging.knative.dev/v1
+      name: messages
+  sink:
+    uri: log:info