You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@camel.apache.org by dk...@apache.org on 2016/03/01 21:01:58 UTC

[1/9] camel git commit: Initial revision

Repository: camel
Updated Branches:
  refs/heads/master 837f355c2 -> e589faa82


Initial revision


Project: http://git-wip-us.apache.org/repos/asf/camel/repo
Commit: http://git-wip-us.apache.org/repos/asf/camel/commit/4c78e449
Tree: http://git-wip-us.apache.org/repos/asf/camel/tree/4c78e449
Diff: http://git-wip-us.apache.org/repos/asf/camel/diff/4c78e449

Branch: refs/heads/master
Commit: 4c78e449c8acbe854a9409321e9ea5625778ed72
Parents: 
Author: Jean-Baptiste Onofré <jb...@apache.org>
Authored: Wed Oct 15 18:10:00 2014 +0200
Committer: Jean-Baptiste Onofré <jb...@apache.org>
Committed: Wed Oct 15 18:10:00 2014 +0200

----------------------------------------------------------------------
 .gitignore                                      |   3 +
 pom.xml                                         |  60 ++++++
 .../camel/component/gridfs/GridFsComponent.java |  56 +++++
 .../camel/component/gridfs/GridFsEndpoint.java  | 143 +++++++++++++
 .../camel/component/gridfs/GridFsProducer.java  |  90 ++++++++
 src/main/resources/META-INF/LICENSE.txt         | 203 +++++++++++++++++++
 src/main/resources/META-INF/NOTICE.txt          |  11 +
 .../services/org/apache/camel/component/gridfs  |  18 ++
 8 files changed, 584 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/camel/blob/4c78e449/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..fe30fe5
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+target
+*.i??
+.idea

http://git-wip-us.apache.org/repos/asf/camel/blob/4c78e449/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..5b1e1a1
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>org.apache.camel</groupId>
+        <artifactId>components</artifactId>
+        <version>2.13.2</version>
+    </parent>
+
+    <artifactId>camel-gridfs</artifactId>
+    <packaging>bundle</packaging>
+    <name>Camel :: MongoDB GridFS</name>
+    <description>Camel MongoDB GridFS component</description>
+
+    <properties>
+        <camel.osgi.export.pkg>org.apache.camel.component.gridfs.*</camel.osgi.export.pkg>
+        <camel.osgi.export.service>org.apache.camel.spi.ComponentResolver;component=gridfs</camel.osgi.export.service>
+    </properties>
+
+    <dependencies>
+
+        <dependency>
+            <groupId>org.apache.camel</groupId>
+            <artifactId>camel-core</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.camel</groupId>
+            <artifactId>camel-jackson</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.mongodb</groupId>
+            <artifactId>mongo-java-driver</artifactId>
+            <version>${mongo-java-driver-version}</version>
+        </dependency>
+
+    </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/camel/blob/4c78e449/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java b/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
new file mode 100644
index 0000000..87d5394
--- /dev/null
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.gridfs;
+
+import com.mongodb.Mongo;
+import org.apache.camel.Endpoint;
+import org.apache.camel.impl.DefaultComponent;
+import org.apache.camel.util.CamelContextHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+
+public class GridFsComponent extends DefaultComponent {
+
+    private final static Logger LOG = LoggerFactory.getLogger(GridFsComponent.class);
+
+    private volatile Mongo db;
+
+    protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
+        if (db == null) {
+            db = CamelContextHelper.mandatoryLookup(getCamelContext(), remaining, Mongo.class);
+            LOG.debug("Resolved the connection with the name {} as {}", remaining, db);
+        }
+
+        Endpoint endpoint = new GridFsEndpoint(uri, this);
+        parameters.put("mongoConnection", db);
+        setProperties(endpoint, parameters);
+
+        return endpoint;
+    }
+
+    @Override
+    protected void doShutdown() throws Exception {
+        if (db != null) {
+            LOG.debug("Closing the connection {} on {}", db, this);
+            db.close();
+        }
+        super.doShutdown();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/camel/blob/4c78e449/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java b/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
new file mode 100644
index 0000000..d630160
--- /dev/null
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
@@ -0,0 +1,143 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.gridfs;
+
+import com.mongodb.DB;
+import com.mongodb.DBCollection;
+import com.mongodb.Mongo;
+import com.mongodb.gridfs.GridFS;
+import org.apache.camel.Consumer;
+import org.apache.camel.Processor;
+import org.apache.camel.Producer;
+import org.apache.camel.impl.DefaultEndpoint;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class GridFsEndpoint extends DefaultEndpoint {
+
+    private static final Logger LOG = LoggerFactory.getLogger(GridFsEndpoint.class);
+
+    private Mongo mongoConnection;
+    private String database;
+    private String colCounters;
+    private String colTP;
+    private DBCollection dbColCounters;
+    private DBCollection dbColTP;
+    private DB db;
+    private GridFS gridFs;
+
+    public GridFsEndpoint() { }
+
+    public GridFsEndpoint(String uri, GridFsComponent component) {
+        super(uri, component);
+    }
+
+    public GridFsEndpoint(String endpointUri) {
+        super(endpointUri);
+    }
+
+    public Producer createProducer() throws Exception {
+        initializeConnection();
+        return new GridFsProducer(this);
+    }
+
+    public Consumer createConsumer(Processor processor) {
+        return null;
+    }
+
+    public boolean isSingleton() {
+        return true;
+    }
+
+    public void initializeConnection() throws Exception {
+        LOG.info("Initialize GridFS endpoint: {}", this.toString());
+        if (database == null || colCounters == null || colTP == null) {
+            throw new IllegalStateException("Missing required endpoint configuration: database and/or colCounters and/or colTP");
+        }
+        db = mongoConnection.getDB(database);
+        if (db == null) {
+            throw new IllegalStateException("Could not initialize GridFsComponent. Database " + database + " does not exist.");
+        }
+        dbColCounters = db.getCollection(colCounters);
+        dbColTP = db.getCollection(colTP);
+        gridFs = new GridFS(db);
+    }
+
+    public Mongo getMongoConnection() {
+        return mongoConnection;
+    }
+
+    public void setMongoConnection(Mongo mongoConnection) {
+        this.mongoConnection = mongoConnection;
+    }
+
+    public String getDatabase() {
+        return database;
+    }
+
+    public void setDatabase(String database) {
+        this.database = database;
+    }
+
+    public String getColCounters() {
+        return colCounters;
+    }
+
+    public void setColCounters(String colCounters) {
+        this.colCounters = colCounters;
+    }
+
+    public String getColTP() {
+        return colTP;
+    }
+
+    public void setColTP(String colTP) {
+        this.colTP = colTP;
+    }
+
+    public DBCollection getDbColCounters() {
+        return dbColCounters;
+    }
+
+    public void setDbColCounters(DBCollection dbColCounters) {
+        this.dbColCounters = dbColCounters;
+    }
+
+    public DBCollection getDbColTP() {
+        return dbColTP;
+    }
+
+    public void setDbColTP(DBCollection dbColTP) {
+        this.dbColTP = dbColTP;
+    }
+
+    public DB getDb() {
+        return db;
+    }
+
+    public void setDb(DB db) {
+        this.db = db;
+    }
+
+    public GridFS getGridFs() {
+        return gridFs;
+    }
+
+    public void setGridFs(GridFS gridFs) {
+        this.gridFs = gridFs;
+    }
+}

http://git-wip-us.apache.org/repos/asf/camel/blob/4c78e449/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java b/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
new file mode 100644
index 0000000..e76af23
--- /dev/null
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.gridfs;
+
+import com.mongodb.DBObject;
+import com.mongodb.gridfs.GridFSInputFile;
+import com.mongodb.util.JSON;
+import org.apache.camel.Exchange;
+import org.apache.camel.impl.DefaultProducer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+
+public class GridFsProducer extends DefaultProducer {
+
+    private static final Logger LOG = LoggerFactory.getLogger(GridFsProducer.class);
+    private GridFsEndpoint endpoint;
+
+    public GridFsProducer(GridFsEndpoint endpoint) {
+        super(endpoint);
+        this.endpoint = endpoint;
+    }
+
+    public void process(Exchange exchange) throws Exception {
+        // set DBObject for query
+        DBObject dbObjQuery = (DBObject) JSON.parse("{_id:'inventory'}");
+
+        // set DBObject for update
+        DBObject dbObjUpdate = (DBObject) JSON.parse("{$inc:{seq:1}}");
+
+        // get inventoryID
+        DBObject invID = endpoint.getDbColCounters().findAndModify(dbObjQuery, dbObjUpdate);
+
+        // get the in message body
+        String TPID = exchange.getIn().getBody().toString();
+
+        // TODO set generic
+        // specific: get trading partner name, load_type, do_legacy
+        DBObject dbObjTPQuery = (DBObject) JSON.parse("{'tpid':'" + TPID + "'}");
+        DBObject tpName = endpoint.getDbColTP().findOne(dbObjTPQuery);
+
+        // set the tpName and tpLoadType in the headers
+        exchange.getIn().setHeader("tpName", tpName.get("name").toString());
+        exchange.getIn().setHeader("tpLoadType", tpName.get("load_type").toString());
+        // most won't have do_legacy, so catch error and default to 'Y'
+        try {
+            exchange.getIn().setHeader("tpDoLegacy", tpName.get("do_legacy").toString());
+        } catch (Exception e) {
+            exchange.getIn().setHeader("tpDoLegacy", "Y");
+        }
+
+        // save the TPID for move
+        exchange.getIn().setHeader("TPID", TPID);
+
+        String sInv = invID.get("seq").toString();
+        // strip off decimal
+        sInv = sInv.substring(0, sInv.lastIndexOf("."));
+        exchange.getIn().setHeader("mInv", sInv);
+
+        File file = new File(exchange.getIn().getHeader("gridFsInputFile").toString());
+        GridFSInputFile gfsFile = endpoint.getGridFs().createFile(file);
+
+        // set filename
+        gfsFile.setFilename(exchange.getIn().getHeader("gridFsFileName").toString());
+
+        // add metadata
+        String metaData = "{'inventoryID':" + sInv + ", 'TPID':'" + TPID + "', 'doc_type':'original', 'status':'initial_save'}";
+        DBObject dbObject = (DBObject) JSON.parse(metaData);
+        gfsFile.setMetaData(dbObject);
+
+        // save the input file into mongoDB
+        gfsFile.save();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/camel/blob/4c78e449/src/main/resources/META-INF/LICENSE.txt
----------------------------------------------------------------------
diff --git a/src/main/resources/META-INF/LICENSE.txt b/src/main/resources/META-INF/LICENSE.txt
new file mode 100644
index 0000000..6b0b127
--- /dev/null
+++ b/src/main/resources/META-INF/LICENSE.txt
@@ -0,0 +1,203 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+

http://git-wip-us.apache.org/repos/asf/camel/blob/4c78e449/src/main/resources/META-INF/NOTICE.txt
----------------------------------------------------------------------
diff --git a/src/main/resources/META-INF/NOTICE.txt b/src/main/resources/META-INF/NOTICE.txt
new file mode 100644
index 0000000..2e215bf
--- /dev/null
+++ b/src/main/resources/META-INF/NOTICE.txt
@@ -0,0 +1,11 @@
+   =========================================================================
+   ==  NOTICE file corresponding to the section 4 d of                    ==
+   ==  the Apache License, Version 2.0,                                   ==
+   ==  in this case for the Apache Camel distribution.                    ==
+   =========================================================================
+
+   This product includes software developed by
+   The Apache Software Foundation (http://www.apache.org/).
+
+   Please read the different LICENSE files present in the licenses directory of
+   this distribution.

http://git-wip-us.apache.org/repos/asf/camel/blob/4c78e449/src/main/resources/META-INF/services/org/apache/camel/component/gridfs
----------------------------------------------------------------------
diff --git a/src/main/resources/META-INF/services/org/apache/camel/component/gridfs b/src/main/resources/META-INF/services/org/apache/camel/component/gridfs
new file mode 100644
index 0000000..50df682
--- /dev/null
+++ b/src/main/resources/META-INF/services/org/apache/camel/component/gridfs
@@ -0,0 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+class=org.apache.camel.component.gridfs.GridFsComponent


[6/9] camel git commit: [CAMEL-9659] Add component docs

Posted by dk...@apache.org.
[CAMEL-9659] Add component docs


Project: http://git-wip-us.apache.org/repos/asf/camel/repo
Commit: http://git-wip-us.apache.org/repos/asf/camel/commit/9ec105a3
Tree: http://git-wip-us.apache.org/repos/asf/camel/tree/9ec105a3
Diff: http://git-wip-us.apache.org/repos/asf/camel/diff/9ec105a3

Branch: refs/heads/master
Commit: 9ec105a3e251a59d982146aea5c4f2df0505866f
Parents: 4520410
Author: Daniel Kulp <dk...@apache.org>
Authored: Tue Mar 1 13:01:36 2016 -0500
Committer: Daniel Kulp <dk...@apache.org>
Committed: Tue Mar 1 14:23:11 2016 -0500

----------------------------------------------------------------------
 .../camel/component/gridfs/GridFsEndpoint.java  | 73 ++++++++++++++++----
 1 file changed, 61 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/camel/blob/9ec105a3/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java b/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
index 554c4cd..4856a7f 100644
--- a/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
@@ -52,9 +52,9 @@ public class GridFsEndpoint extends DefaultEndpoint {
 
     @UriPath @Metadata(required = "true")
     private String connectionBean;
-    @UriParam
+    @UriParam @Metadata(required = "true")
     private String database;
-    @UriParam
+    @UriParam(defaultValue = GridFS.DEFAULT_BUCKET)
     private String bucket;
     @UriParam(enums = "ACKNOWLEDGED,W1,W2,W3,UNACKNOWLEDGED,JOURNALED,MAJORITY,SAFE")
     private WriteConcern writeConcern;
@@ -62,23 +62,24 @@ public class GridFsEndpoint extends DefaultEndpoint {
     private WriteConcern writeConcernRef;
     @UriParam
     private ReadPreference readPreference;
-    @UriParam
+    
+    @UriParam(label = "producer")
     private String operation;
 
-    @UriParam
+    @UriParam(label = "consumer")
     private String query;
-    @UriParam
+    @UriParam(label = "consumer", defaultValue = "1000")
     private long initialDelay = 1000;
-    @UriParam
+    @UriParam(label = "consumer", defaultValue = "500")
     private long delay = 500;
     
-    @UriParam 
+    @UriParam(label = "consumer", defaultValue = "TimeStamp")
     private QueryStrategy queryStrategy = QueryStrategy.TimeStamp;
-    @UriParam
+    @UriParam(label = "consumer", defaultValue = "camel-timestamps")
     private String persistentTSCollection = "camel-timestamps";
-    @UriParam
+    @UriParam(label = "consumer", defaultValue = "camel-timestamp")
     private String persistentTSObject = "camel-timestamp";
-    @UriParam
+    @UriParam(label = "consumer", defaultValue = "camel-processed")
     private String fileAttributeName = "camel-processed";
 
 
@@ -167,6 +168,11 @@ public class GridFsEndpoint extends DefaultEndpoint {
     public Mongo getMongoConnection() {
         return mongoConnection;
     }
+    /**
+     * Sets the Mongo instance that represents the backing connection
+     * 
+     * @param mongoConnection the connection to the database
+     */
     public void setMongoConnection(Mongo mongoConnection) {
         this.mongoConnection = mongoConnection;
     }
@@ -178,9 +184,19 @@ public class GridFsEndpoint extends DefaultEndpoint {
     public String getDatabase() {
         return database;
     }
+    /**
+     * Sets the name of the MongoDB database to target
+     * 
+     * @param database name of the MongoDB database
+     */
     public void setDatabase(String database) {
         this.database = database;
     }
+    /**
+     * Sets the name of the GridFS bucket within the database.   Default is "fs".
+     * 
+     * @param bucket name of the GridFS bucket within the database
+     */
     public String getBucket() {
         return bucket;
     }
@@ -191,40 +207,73 @@ public class GridFsEndpoint extends DefaultEndpoint {
     public String getQuery() {
         return query;
     }
+    /**
+     * Additional query parameters (in JSON) that are used to configure the query used for finding
+     * files in the GridFsConsumer
+     * @param query
+     */
     public void setQuery(String query) {
         this.query = query;
     }
     public long getDelay() {
         return delay;
     }
+    /**
+     * Sets the delay between polls within the Consumer.  Default is 500ms
+     * @param delay
+     */
     public void setDelay(long delay) {
         this.delay = delay;
     }
     public long getInitialDelay() {
         return initialDelay;
     }
+    /**
+     * Sets the initialDelay before the consumer will start polling.  Default is 1000ms
+     * @param initialDelay
+     */
     public void setInitialDelay(long initialDelay) {
         this.initialDelay = initialDelay;
     }
     
+    /**
+     * Sets the QueryStrategy that is used for polling for new files.  Default is Timestamp
+     * @see QueryStrategy
+     * @param s
+     */
     public void setQueryStrategy(String s) {
         queryStrategy = QueryStrategy.valueOf(s);
     }
     public QueryStrategy getQueryStrategy() {
         return queryStrategy;
     }
+    /**
+     * If the QueryType uses a persistent timestamp, this sets the name of the collection within
+     * the DB to store the timestamp.
+     * @param s
+     */
     public void setPersistentTSCollection(String s) {
         persistentTSCollection = s;
     }
     public String getPersistentTSCollection() {
         return persistentTSCollection;
     }
-    public void setPersistentTSObject(String s) {
-        persistentTSObject = s;
+    /**
+     * If the QueryType uses a persistent timestamp, this is the ID of the object in the collection
+     * to store the timestamp.   
+     * @param id
+     */
+    public void setPersistentTSObject(String id) {
+        persistentTSObject = id;
     }
     public String getPersistentTSObject() {
         return persistentTSObject;
     }
+    
+    /**
+     * If the QueryType uses a FileAttribute, this sets the name of the attribute that is used. Default is "camel-processed".
+     * @param f
+     */
     public void setFileAttributeName(String f) {
         fileAttributeName = f;
     }


[4/9] camel git commit: [CAMEL-9659] Add simple polling consumer

Posted by dk...@apache.org.
[CAMEL-9659] Add simple polling consumer


Project: http://git-wip-us.apache.org/repos/asf/camel/repo
Commit: http://git-wip-us.apache.org/repos/asf/camel/commit/ce54b04e
Tree: http://git-wip-us.apache.org/repos/asf/camel/tree/ce54b04e
Diff: http://git-wip-us.apache.org/repos/asf/camel/diff/ce54b04e

Branch: refs/heads/master
Commit: ce54b04e1f0b5f7178dd0ee606d1ec644f24658d
Parents: f80654f
Author: Daniel Kulp <dk...@apache.org>
Authored: Mon Feb 29 15:40:22 2016 -0500
Committer: Daniel Kulp <dk...@apache.org>
Committed: Tue Mar 1 14:23:04 2016 -0500

----------------------------------------------------------------------
 .../camel/component/gridfs/GridFsConsumer.java  | 88 +++++++++++++++++++-
 .../camel/component/gridfs/GridFsEndpoint.java  | 43 +++++++++-
 .../camel/component/gridfs/GridFsProducer.java  | 14 ++--
 .../component/gridfs/AbstractMongoDbTest.java   | 15 +++-
 .../gridfs/GridFsConsumerOperationsTest.java    | 74 ----------------
 .../component/gridfs/GridFsConsumerTest.java    | 75 +++++++++++++++++
 .../gridfs/GridFsProducerOperationsTest.java    | 74 ++++++++++++++++
 7 files changed, 290 insertions(+), 93 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/camel/blob/ce54b04e/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java b/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
index 4f2ce66..240dd47 100644
--- a/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
@@ -19,22 +19,102 @@
 
 package org.apache.camel.component.gridfs;
 
+import java.io.InputStream;
+import java.util.concurrent.ExecutorService;
+
+import com.mongodb.BasicDBObject;
+import com.mongodb.DBCursor;
+import com.mongodb.DBObject;
+import com.mongodb.gridfs.GridFSDBFile;
+import com.mongodb.util.JSON;
+
+import org.apache.camel.Exchange;
 import org.apache.camel.Processor;
 import org.apache.camel.impl.DefaultConsumer;
 
 /**
  * 
  */
-public class GridFsConsumer extends DefaultConsumer {
-    final GridFsEndpoint ep;
-    
+public class GridFsConsumer extends DefaultConsumer implements Runnable {
+    final GridFsEndpoint endpoint;
+    private ExecutorService executor;
+
     /**
      * @param endpoint
      * @param processor
      */
     public GridFsConsumer(GridFsEndpoint endpoint, Processor processor) {
         super(endpoint, processor);
-        ep = endpoint;
+        this.endpoint = endpoint;
+    }
+
+    
+
+    @Override
+    protected void doStop() throws Exception {
+        super.doStop();
+        if (executor != null) {
+            executor.shutdown();
+            executor = null;
+        }
+    }
+
+    @Override
+    protected void doStart() throws Exception {
+        super.doStart();
+        executor = endpoint.getCamelContext().getExecutorServiceManager().newFixedThreadPool(this, endpoint.getEndpointUri(), 1);
+        executor.execute(this);
     }
 
+    @Override
+    public void run() {
+        DBCursor c = null;
+        java.util.Date fromDate = new java.util.Date();
+        try {
+            Thread.sleep(endpoint.getInitialDelay());
+            while (isStarted()) {                
+                if (c == null || c.getCursorId() == 0) {
+                    if (c != null) {
+                        c.close();
+                    }
+                    String queryString = endpoint.getQuery();
+                    DBObject query;
+                    if (queryString == null) {
+                        query = new BasicDBObject();
+                    } else {
+                        query = (DBObject) JSON.parse(queryString);
+                    }
+                    
+                    query.put("uploadDate", new BasicDBObject("$gte", fromDate));
+                    c = endpoint.getFilesCollection().find(query);
+                    fromDate = new java.util.Date();
+                }
+                while (c.hasNext() && isStarted()) {
+                    GridFSDBFile file = (GridFSDBFile)c.next();
+                    file = endpoint.getGridFs().findOne(new BasicDBObject("_id", file.getId()));
+                    
+                    Exchange exchange = endpoint.createExchange();
+                    exchange.getIn().setHeader(GridFsEndpoint.GRIDFS_METADATA, JSON.serialize(file.getMetaData()));
+                    exchange.getIn().setHeader(Exchange.FILE_CONTENT_TYPE, file.getContentType());
+                    exchange.getIn().setHeader(Exchange.FILE_LENGTH, file.getLength());
+                    exchange.getIn().setHeader(Exchange.FILE_LAST_MODIFIED, file.getUploadDate());
+                    exchange.getIn().setBody(file.getInputStream(), InputStream.class);
+                    try {
+                        getProcessor().process(exchange);
+                    } catch (Exception e) {
+                        // TODO Auto-generated catch block
+                        e.printStackTrace();
+                    }
+                }
+                Thread.sleep(endpoint.getDelay());
+            }
+        } catch (Throwable e1) {
+            // TODO Auto-generated catch block
+            e1.printStackTrace();
+        }
+        if (c != null) {
+            c.close();
+        }
+    }
+    
 }

http://git-wip-us.apache.org/repos/asf/camel/blob/ce54b04e/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java b/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
index cef109a..008e004 100644
--- a/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
@@ -17,6 +17,7 @@
 package org.apache.camel.component.gridfs;
 
 import com.mongodb.DB;
+import com.mongodb.DBCollection;
 import com.mongodb.Mongo;
 import com.mongodb.ReadPreference;
 import com.mongodb.WriteConcern;
@@ -35,6 +36,9 @@ import org.slf4j.LoggerFactory;
 @UriEndpoint(scheme = "gridfs", title = "MongoDBGridFS", syntax = "gridfs:connectionBean", 
             label = "database,nosql")
 public class GridFsEndpoint extends DefaultEndpoint {
+    public static final String GRIDFS_OPERATION = "gridfs.operation";
+    public static final String GRIDFS_METADATA = "gridfs.metadata";
+    public static final String GRIDFS_CHUNKSIZE = "gridfs.chunksize";
 
     private static final Logger LOG = LoggerFactory.getLogger(GridFsEndpoint.class);
 
@@ -53,10 +57,18 @@ public class GridFsEndpoint extends DefaultEndpoint {
     @UriParam
     private String operation;
 
+    @UriParam
+    private String query;
+    @UriParam
+    private long initialDelay = 1000;
+    @UriParam
+    private long delay = 500;
+    
     
     private Mongo mongoConnection;
     private DB db;
     private GridFS gridFs;
+    private DBCollection filesCollection;
 
     public GridFsEndpoint(String uri, GridFsComponent component) {
         super(uri, component);
@@ -88,7 +100,11 @@ public class GridFsEndpoint extends DefaultEndpoint {
         if (db == null) {
             throw new IllegalStateException("Could not initialize GridFsComponent. Database " + database + " does not exist.");
         }
-        gridFs = new GridFS(db, bucket == null ? GridFS.DEFAULT_BUCKET : bucket);
+        gridFs = new GridFS(db, bucket == null ? GridFS.DEFAULT_BUCKET : bucket) {
+            {
+                filesCollection = getFilesCollection();
+            }
+        };
     }
 
     
@@ -121,8 +137,6 @@ public class GridFsEndpoint extends DefaultEndpoint {
     
     
     // ======= Getters and setters ===============================================
-
-
     public String getConnectionBean() {
         return connectionBean;
     }
@@ -153,6 +167,25 @@ public class GridFsEndpoint extends DefaultEndpoint {
         this.bucket = bucket;
     }
     
+    public String getQuery() {
+        return query;
+    }
+    public void setQuery(String query) {
+        this.query = query;
+    }
+    public long getDelay() {
+        return delay;
+    }
+    public void setDelay(long delay) {
+        this.delay = delay;
+    }
+    public long getInitialDelay() {
+        return initialDelay;
+    }
+    public void setInitialDelay(long initialDelay) {
+        this.initialDelay = initialDelay;
+    }
+    
     /**
      * Set the {@link WriteConcern} for write operations on MongoDB using the standard ones.
      * Resolved from the fields of the WriteConcern class by calling the {@link WriteConcern#valueOf(String)} method.
@@ -225,4 +258,8 @@ public class GridFsEndpoint extends DefaultEndpoint {
     public void setGridFs(GridFS gridFs) {
         this.gridFs = gridFs;
     }
+    public DBCollection getFilesCollection() {
+        return filesCollection;
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/camel/blob/ce54b04e/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java b/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
index 7954644..58416b1 100644
--- a/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
@@ -31,11 +31,7 @@ import org.apache.camel.Exchange;
 import org.apache.camel.impl.DefaultProducer;
 
 
-public class GridFsProducer extends DefaultProducer {
-    public static final String GRIDFS_OPERATION = "gridfs.operation";
-    public static final String GRIDFS_METADATA = "gridfs.metadata";
-    public static final String GRIDFS_CHUNKSIZE = "gridfs.chunksize";
-    
+public class GridFsProducer extends DefaultProducer {    
     private GridFsEndpoint endpoint;
 
     public GridFsProducer(GridFsEndpoint endpoint) {
@@ -46,11 +42,11 @@ public class GridFsProducer extends DefaultProducer {
     public void process(Exchange exchange) throws Exception {
         String operation = endpoint.getOperation();
         if (operation == null) {
-            operation = exchange.getIn().getHeader(GRIDFS_OPERATION, String.class);
+            operation = exchange.getIn().getHeader(GridFsEndpoint.GRIDFS_OPERATION, String.class);
         }
         if (operation == null || "create".equals(operation)) {
             final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
-            Long chunkSize = exchange.getIn().getHeader(GRIDFS_CHUNKSIZE, Long.class);
+            Long chunkSize = exchange.getIn().getHeader(GridFsEndpoint.GRIDFS_CHUNKSIZE, Long.class);
 
             InputStream ins = exchange.getIn().getMandatoryBody(InputStream.class);
             GridFSInputFile gfsFile = endpoint.getGridFs().createFile(ins, filename, true);
@@ -61,7 +57,7 @@ public class GridFsProducer extends DefaultProducer {
             if (ct != null) {
                 gfsFile.setContentType(ct);
             }
-            String metaData = exchange.getIn().getHeader(GRIDFS_METADATA, String.class);
+            String metaData = exchange.getIn().getHeader(GridFsEndpoint.GRIDFS_METADATA, String.class);
             DBObject dbObject = (DBObject) JSON.parse(metaData);
             gfsFile.setMetaData(dbObject);
             gfsFile.save();
@@ -73,7 +69,7 @@ public class GridFsProducer extends DefaultProducer {
             final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
             GridFSDBFile file = endpoint.getGridFs().findOne(filename);
             if (file != null) {
-                exchange.getIn().setHeader(GRIDFS_METADATA, JSON.serialize(file.getMetaData()));
+                exchange.getIn().setHeader(GridFsEndpoint.GRIDFS_METADATA, JSON.serialize(file.getMetaData()));
                 exchange.getIn().setHeader(Exchange.FILE_CONTENT_TYPE, file.getContentType());
                 exchange.getIn().setHeader(Exchange.FILE_LENGTH, file.getLength());
                 exchange.getIn().setHeader(Exchange.FILE_LAST_MODIFIED, file.getUploadDate());

http://git-wip-us.apache.org/repos/asf/camel/blob/ce54b04e/src/test/java/org/apache/camel/component/gridfs/AbstractMongoDbTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/camel/component/gridfs/AbstractMongoDbTest.java b/src/test/java/org/apache/camel/component/gridfs/AbstractMongoDbTest.java
index b1c94b9..f4c2bff 100644
--- a/src/test/java/org/apache/camel/component/gridfs/AbstractMongoDbTest.java
+++ b/src/test/java/org/apache/camel/component/gridfs/AbstractMongoDbTest.java
@@ -29,8 +29,8 @@ import org.springframework.context.annotation.AnnotationConfigApplicationContext
 
 public abstract class AbstractMongoDbTest extends CamelTestSupport {
 
-    protected static MongoClient mongo;
-    protected static GridFS gridfs;
+    protected MongoClient mongo;
+    protected GridFS gridfs;
 
     protected ApplicationContext applicationContext;
 
@@ -38,9 +38,18 @@ public abstract class AbstractMongoDbTest extends CamelTestSupport {
     @Override
     public void doPostSetup() {
         mongo = applicationContext.getBean(MongoClient.class);
-        gridfs = new GridFS(mongo.getDB("test"));
+        gridfs = new GridFS(mongo.getDB("test"), getBucket());
     }
 
+    public String getBucket() {
+        return this.getClass().getSimpleName();
+    }
+    
+    @Override
+    public void tearDown() throws Exception {
+        super.tearDown();
+        mongo.close();
+    }
 
     @Override
     protected CamelContext createCamelContext() throws Exception {

http://git-wip-us.apache.org/repos/asf/camel/blob/ce54b04e/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerOperationsTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerOperationsTest.java b/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerOperationsTest.java
deleted file mode 100644
index 8aaa0de..0000000
--- a/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerOperationsTest.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.camel.component.gridfs;
-
-import java.io.InputStream;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.camel.Exchange;
-import org.apache.camel.builder.RouteBuilder;
-import org.junit.Test;
-
-public class GridFsConsumerOperationsTest extends AbstractMongoDbTest {
-    
-    @Override
-    protected RouteBuilder createRouteBuilder() throws Exception {
-        return new RouteBuilder() {
-            public void configure() {
-                from("direct:create").to("gridfs:myDb?database={{mongodb.testDb}}&operation=create");
-                from("direct:remove").to("gridfs:myDb?database={{mongodb.testDb}}&operation=remove");
-                from("direct:findOne").to("gridfs:myDb?database={{mongodb.testDb}}&operation=findOne");
-                from("direct:listAll").to("gridfs:myDb?database={{mongodb.testDb}}&operation=listAll");
-                from("direct:count").to("gridfs:myDb?database={{mongodb.testDb}}&operation=count");
-                from("direct:headerOp").to("gridfs:myDb?database={{mongodb.testDb}}");
-            }
-        };
-    }
-    
-    @Test
-    public void testOperations() throws Exception {
-        Map<String, Object> headers = new HashMap<String, Object>();
-        String fn = "filename.for.db.txt";
-        assertEquals(0, gridfs.find(fn).size());
-        
-        headers.put(Exchange.FILE_NAME, fn);
-        String data = "This is some stuff to go into the db";
-        template.requestBodyAndHeaders("direct:create", data, headers);
-        assertEquals(1, gridfs.find(fn).size());
-        assertEquals(1, template.requestBodyAndHeaders("direct:count", null, headers));
-        InputStream ins = template.requestBodyAndHeaders("direct:findOne", null, headers, InputStream.class);
-        assertNotNull(ins);
-        byte b[] = new byte[2048];
-        int i = ins.read(b);
-        assertEquals(data, new String(b, 0, i, "utf-8"));
-        
-        headers.put(Exchange.FILE_NAME, "2-" + fn);
-        
-        template.requestBodyAndHeaders("direct:create", data + "data2", headers);
-        assertEquals(1, template.requestBodyAndHeaders("direct:count", null, headers));
-        assertEquals(2, template.requestBody("direct:count", null, Integer.class).intValue());
-        
-        String s = template.requestBody("direct:listAll", null, String.class);
-        assertTrue(s.contains("2-" + fn));
-        template.requestBodyAndHeaders("direct:remove", null, headers);
-        assertEquals(1, template.requestBody("direct:count", null, Integer.class).intValue());
-        s = template.requestBody("direct:listAll", null, String.class);
-        assertFalse(s.contains("2-" + fn));
-    }
-}
-

http://git-wip-us.apache.org/repos/asf/camel/blob/ce54b04e/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java b/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java
new file mode 100644
index 0000000..a84260c
--- /dev/null
+++ b/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.camel.component.gridfs;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.builder.RouteBuilder;
+import org.apache.camel.component.mock.MockEndpoint;
+
+import org.junit.Test;
+
+/**
+ * 
+ */
+public class GridFsConsumerTest extends AbstractMongoDbTest {
+    @Override
+    protected RouteBuilder createRouteBuilder() throws Exception {
+        return new RouteBuilder() {
+            public void configure() {
+                from("direct:create").to("gridfs:myDb?database={{mongodb.testDb}}&operation=create&bucket=" + getBucket());
+                from("gridfs:myDb?database={{mongodb.testDb}}&bucket=" + getBucket()).convertBodyTo(String.class).to("mock:test");
+            }
+        };
+    }
+    
+    
+    @Test
+    public void test() throws Exception {
+        MockEndpoint mock = getMockEndpoint("mock:test");
+        String data = "This is some stuff to go into the db";
+        mock.expectedMessageCount(1);
+        mock.expectedBodiesReceived(data);
+        
+        Map<String, Object> headers = new HashMap<String, Object>();
+        String fn = "filename.for.db.txt";
+        assertEquals(0, gridfs.find(fn).size());
+        
+        headers.put(Exchange.FILE_NAME, fn);
+        template.requestBodyAndHeaders("direct:create", data, headers);
+        
+        mock.assertIsSatisfied();
+        mock.reset();
+        
+        mock.expectedMessageCount(3);
+        mock.expectedBodiesReceived(data, data, data);
+        
+        headers.put(Exchange.FILE_NAME, fn + "_1");
+        template.requestBodyAndHeaders("direct:create", data, headers);
+        headers.put(Exchange.FILE_NAME, fn + "_2");
+        template.requestBodyAndHeaders("direct:create", data, headers);
+        headers.put(Exchange.FILE_NAME, fn + "_3");
+        template.requestBodyAndHeaders("direct:create", data, headers);
+        mock.assertIsSatisfied();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/camel/blob/ce54b04e/src/test/java/org/apache/camel/component/gridfs/GridFsProducerOperationsTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/camel/component/gridfs/GridFsProducerOperationsTest.java b/src/test/java/org/apache/camel/component/gridfs/GridFsProducerOperationsTest.java
new file mode 100644
index 0000000..df7882d
--- /dev/null
+++ b/src/test/java/org/apache/camel/component/gridfs/GridFsProducerOperationsTest.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.gridfs;
+
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.builder.RouteBuilder;
+import org.junit.Test;
+
+public class GridFsProducerOperationsTest extends AbstractMongoDbTest {
+    
+    @Override
+    protected RouteBuilder createRouteBuilder() throws Exception {
+        return new RouteBuilder() {
+            public void configure() {
+                from("direct:create").to("gridfs:myDb?database={{mongodb.testDb}}&operation=create&bucket=" + getBucket());
+                from("direct:remove").to("gridfs:myDb?database={{mongodb.testDb}}&operation=remove&bucket=" + getBucket());
+                from("direct:findOne").to("gridfs:myDb?database={{mongodb.testDb}}&operation=findOne&bucket=" + getBucket());
+                from("direct:listAll").to("gridfs:myDb?database={{mongodb.testDb}}&operation=listAll&bucket=" + getBucket());
+                from("direct:count").to("gridfs:myDb?database={{mongodb.testDb}}&operation=count&bucket=" + getBucket());
+                from("direct:headerOp").to("gridfs:myDb?database={{mongodb.testDb}}&bucket=" + getBucket());
+            }
+        };
+    }
+    
+    @Test
+    public void testOperations() throws Exception {
+        Map<String, Object> headers = new HashMap<String, Object>();
+        String fn = "filename.for.db.txt";
+        assertEquals(0, gridfs.find(fn).size());
+        
+        headers.put(Exchange.FILE_NAME, fn);
+        String data = "This is some stuff to go into the db";
+        template.requestBodyAndHeaders("direct:create", data, headers);
+        assertEquals(1, gridfs.find(fn).size());
+        assertEquals(1, template.requestBodyAndHeaders("direct:count", null, headers));
+        InputStream ins = template.requestBodyAndHeaders("direct:findOne", null, headers, InputStream.class);
+        assertNotNull(ins);
+        byte b[] = new byte[2048];
+        int i = ins.read(b);
+        assertEquals(data, new String(b, 0, i, "utf-8"));
+        
+        headers.put(Exchange.FILE_NAME, "2-" + fn);
+        
+        template.requestBodyAndHeaders("direct:create", data + "data2", headers);
+        assertEquals(1, template.requestBodyAndHeaders("direct:count", null, headers));
+        assertEquals(2, template.requestBody("direct:count", null, Integer.class).intValue());
+        
+        String s = template.requestBody("direct:listAll", null, String.class);
+        assertTrue(s.contains("2-" + fn));
+        template.requestBodyAndHeaders("direct:remove", null, headers);
+        assertEquals(1, template.requestBody("direct:count", null, Integer.class).intValue());
+        s = template.requestBody("direct:listAll", null, String.class);
+        assertFalse(s.contains("2-" + fn));
+    }
+}
+


[7/9] camel git commit: Add 'components/camel-gridfs/' from commit '9ec105a3e251a59d982146aea5c4f2df0505866f'

Posted by dk...@apache.org.
Add 'components/camel-gridfs/' from commit '9ec105a3e251a59d982146aea5c4f2df0505866f'

git-subtree-dir: components/camel-gridfs
git-subtree-mainline: 837f355c2784f09c3369e5930190b22b02045980
git-subtree-split: 9ec105a3e251a59d982146aea5c4f2df0505866f


Project: http://git-wip-us.apache.org/repos/asf/camel/repo
Commit: http://git-wip-us.apache.org/repos/asf/camel/commit/f1e21c53
Tree: http://git-wip-us.apache.org/repos/asf/camel/tree/f1e21c53
Diff: http://git-wip-us.apache.org/repos/asf/camel/diff/f1e21c53

Branch: refs/heads/master
Commit: f1e21c5370188ef7018962f11e8ef2af1974f4f7
Parents: 837f355 9ec105a
Author: Daniel Kulp <dk...@apache.org>
Authored: Tue Mar 1 14:39:22 2016 -0500
Committer: Daniel Kulp <dk...@apache.org>
Committed: Tue Mar 1 14:39:22 2016 -0500

----------------------------------------------------------------------
 components/camel-gridfs/.gitignore              |   3 +
 components/camel-gridfs/pom.xml                 |  69 ++++
 .../camel/component/gridfs/GridFsComponent.java |  62 ++++
 .../camel/component/gridfs/GridFsConsumer.java  | 180 ++++++++++
 .../camel/component/gridfs/GridFsEndpoint.java  | 360 +++++++++++++++++++
 .../camel/component/gridfs/GridFsProducer.java  | 145 ++++++++
 .../src/main/resources/META-INF/LICENSE.txt     | 203 +++++++++++
 .../src/main/resources/META-INF/NOTICE.txt      |  11 +
 .../services/org/apache/camel/component/gridfs  |  18 +
 .../component/gridfs/AbstractMongoDbTest.java   |  62 ++++
 .../gridfs/EmbedMongoConfiguration.java         |  58 +++
 .../component/gridfs/GridFsConsumerTest.java    | 101 ++++++
 .../gridfs/GridFsProducerOperationsTest.java    |  74 ++++
 .../src/test/resources/log4j.properties         |  37 ++
 .../src/test/resources/mongodb.test.properties  |  21 ++
 15 files changed, 1404 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/camel/blob/f1e21c53/components/camel-gridfs/.gitignore
----------------------------------------------------------------------
diff --cc components/camel-gridfs/.gitignore
index 0000000,0000000..fe30fe5
new file mode 100644
--- /dev/null
+++ b/components/camel-gridfs/.gitignore
@@@ -1,0 -1,0 +1,3 @@@
++target
++*.i??
++.idea

http://git-wip-us.apache.org/repos/asf/camel/blob/f1e21c53/components/camel-gridfs/pom.xml
----------------------------------------------------------------------
diff --cc components/camel-gridfs/pom.xml
index 0000000,0000000..ade40e7
new file mode 100644
--- /dev/null
+++ b/components/camel-gridfs/pom.xml
@@@ -1,0 -1,0 +1,69 @@@
<?xml version="1.0" encoding="UTF-8"?>
<!--

    Licensed to the Apache Software Foundation (ASF) under one or more
    contributor license agreements.  See the NOTICE file distributed with
    this work for additional information regarding copyright ownership.
    The ASF licenses this file to You under the Apache License, Version 2.0
    (the "License"); you may not use this file except in compliance with
    the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.

-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>org.apache.camel</groupId>
        <artifactId>components</artifactId>
        <version>2.16.2</version>
    </parent>

    <artifactId>camel-gridfs</artifactId>
    <packaging>bundle</packaging>
    <name>Camel :: MongoDB GridFS</name>
    <description>Camel MongoDB GridFS component</description>

    <properties>
        <camel.osgi.export.pkg>org.apache.camel.component.gridfs.*</camel.osgi.export.pkg>
        <camel.osgi.export.service>org.apache.camel.spi.ComponentResolver;component=gridfs</camel.osgi.export.service>
    </properties>

    <dependencies>

        <dependency>
            <groupId>org.apache.camel</groupId>
            <artifactId>camel-core</artifactId>
        </dependency>

        <dependency>
            <groupId>org.apache.camel</groupId>
            <artifactId>camel-jackson</artifactId>
        </dependency>

        <dependency>
            <groupId>org.mongodb</groupId>
            <artifactId>mongo-java-driver</artifactId>
            <version>3.2.2</version>
        </dependency>

        <!-- test dependencies -->
        <dependency>
            <groupId>de.flapdoodle.embed</groupId>
            <artifactId>de.flapdoodle.embed.mongo</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.camel</groupId>
            <artifactId>camel-test-spring</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>

</project>

http://git-wip-us.apache.org/repos/asf/camel/blob/f1e21c53/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
----------------------------------------------------------------------
diff --cc components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
index 0000000,0000000..26da915
new file mode 100644
--- /dev/null
+++ b/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
@@@ -1,0 -1,0 +1,62 @@@
++/**
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *      http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++package org.apache.camel.component.gridfs;
++
++import com.mongodb.Mongo;
++import org.apache.camel.Endpoint;
++import org.apache.camel.impl.UriEndpointComponent;
++import org.apache.camel.util.CamelContextHelper;
++import org.slf4j.Logger;
++import org.slf4j.LoggerFactory;
++
++import java.util.Map;
++
++public class GridFsComponent extends UriEndpointComponent {
++
++    private final static Logger LOG = LoggerFactory.getLogger(GridFsComponent.class);
++
++    private volatile Mongo db;
++
++    public GridFsComponent() {
++        super(GridFsEndpoint.class);
++    }
++    
++    protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
++        if (db == null) {
++            db = CamelContextHelper.mandatoryLookup(getCamelContext(), remaining, Mongo.class);
++            LOG.debug("Resolved the connection with the name {} as {}", remaining, db);
++        }
++
++        GridFsEndpoint endpoint = new GridFsEndpoint(uri, this);
++        parameters.put("mongoConnection", db);
++        endpoint.setConnectionBean(remaining);
++        endpoint.setMongoConnection(db);
++        setProperties(endpoint, parameters);
++        
++        return endpoint;
++    }
++
++    @Override
++    protected void doShutdown() throws Exception {
++        if (db != null) {
++            LOG.debug("Closing the connection {} on {}", db, this);
++            db.close();
++        }
++        super.doShutdown();
++    }
++
++}

http://git-wip-us.apache.org/repos/asf/camel/blob/f1e21c53/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
----------------------------------------------------------------------
diff --cc components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
index 0000000,0000000..35d77ee
new file mode 100644
--- /dev/null
+++ b/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
@@@ -1,0 -1,0 +1,180 @@@
++/**
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements. See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership. The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License. You may obtain a copy of the License at
++ *
++ * http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied. See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++
++package org.apache.camel.component.gridfs;
++
++import java.io.InputStream;
++import java.util.concurrent.ExecutorService;
++
++import com.mongodb.BasicDBObject;
++import com.mongodb.BasicDBObjectBuilder;
++import com.mongodb.DBCollection;
++import com.mongodb.DBCursor;
++import com.mongodb.DBObject;
++import com.mongodb.MongoException;
++import com.mongodb.gridfs.GridFSDBFile;
++import com.mongodb.util.JSON;
++
++import org.apache.camel.Exchange;
++import org.apache.camel.Processor;
++import org.apache.camel.component.gridfs.GridFsEndpoint.QueryStrategy;
++import org.apache.camel.impl.DefaultConsumer;
++
++/**
++ * 
++ */
++public class GridFsConsumer extends DefaultConsumer implements Runnable {
++    final GridFsEndpoint endpoint;
++    private ExecutorService executor;
++
++    /**
++     * @param endpoint
++     * @param processor
++     */
++    public GridFsConsumer(GridFsEndpoint endpoint, Processor processor) {
++        super(endpoint, processor);
++        this.endpoint = endpoint;
++    }
++
++    @Override
++    protected void doStop() throws Exception {
++        super.doStop();
++        if (executor != null) {
++            executor.shutdown();
++            executor = null;
++        }
++    }
++
++    @Override
++    protected void doStart() throws Exception {
++        super.doStart();
++        executor = endpoint.getCamelContext().getExecutorServiceManager().newFixedThreadPool(this, endpoint.getEndpointUri(), 1);
++        executor.execute(this);
++    }
++
++    @Override
++    public void run() {
++        DBCursor c = null;
++        java.util.Date fromDate = null;
++        
++        QueryStrategy s = endpoint.getQueryStrategy();
++        boolean usesTimestamp = (s != QueryStrategy.FileAttribute);
++        boolean persistsTimestamp = (s == QueryStrategy.PersistentTimestamp || s == QueryStrategy.PersistentTimestampAndFileAttribute);
++        boolean usesAttribute = (s == QueryStrategy.FileAttribute 
++            || s == QueryStrategy.TimeStampAndFileAttribute 
++            || s == QueryStrategy.PersistentTimestampAndFileAttribute);
++        
++        DBCollection ptsCollection = null;
++        DBObject persistentTimestamp = null;
++        if (persistsTimestamp) {
++            ptsCollection = endpoint.getDB().getCollection(endpoint.getPersistentTSCollection());
++         // ensure standard indexes as long as collections are small
++            try {
++                if (ptsCollection.count() < 1000) {
++                    ptsCollection.createIndex(new BasicDBObject("id", 1));
++                }
++            } catch (MongoException e) {
++                //TODO: Logging
++            }
++            persistentTimestamp = ptsCollection.findOne(new BasicDBObject("id", endpoint.getPersistentTSObject()));
++            if (persistentTimestamp == null) {
++                persistentTimestamp = new BasicDBObject("id", endpoint.getPersistentTSObject());
++                fromDate = new java.util.Date();
++                persistentTimestamp.put("timestamp", fromDate);
++                ptsCollection.save(persistentTimestamp);
++            }
++            fromDate = (java.util.Date)persistentTimestamp.get("timestamp");
++        } else if (usesTimestamp) {
++            fromDate = new java.util.Date();
++        }
++        try {
++            Thread.sleep(endpoint.getInitialDelay());
++            while (isStarted()) {                
++                if (c == null || c.getCursorId() == 0) {
++                    if (c != null) {
++                        c.close();
++                    }
++                    String queryString = endpoint.getQuery();
++                    DBObject query;
++                    if (queryString == null) {
++                        query = new BasicDBObject();
++                    } else {
++                        query = (DBObject) JSON.parse(queryString);
++                    }
++                    if (usesTimestamp) {
++                        query.put("uploadDate", new BasicDBObject("$gt", fromDate));
++                    }
++                    if (usesAttribute) {
++                        query.put(endpoint.getFileAttributeName(), null);
++                    }
++                    c = endpoint.getFilesCollection().find(query);
++                }
++                boolean dateModified = false;
++                while (c.hasNext() && isStarted()) {
++                    GridFSDBFile file = (GridFSDBFile)c.next();
++                    GridFSDBFile forig = file;
++                    if (usesAttribute) {
++                        file.put(endpoint.getFileAttributeName(), "processing");
++                        DBObject q = BasicDBObjectBuilder.start("_id", file.getId()).append("camel-processed", null).get();
++                        forig = (GridFSDBFile)endpoint.getFilesCollection().findAndModify(q, null, null, false, file, true, false);
++                    }
++                    if (forig != null) {
++                        file = endpoint.getGridFs().findOne(new BasicDBObject("_id", file.getId()));
++                        
++                        Exchange exchange = endpoint.createExchange();
++                        exchange.getIn().setHeader(GridFsEndpoint.GRIDFS_METADATA, JSON.serialize(file.getMetaData()));
++                        exchange.getIn().setHeader(Exchange.FILE_CONTENT_TYPE, file.getContentType());
++                        exchange.getIn().setHeader(Exchange.FILE_LENGTH, file.getLength());
++                        exchange.getIn().setHeader(Exchange.FILE_LAST_MODIFIED, file.getUploadDate());
++                        exchange.getIn().setBody(file.getInputStream(), InputStream.class);
++                        try {
++                            getProcessor().process(exchange);
++                            //System.out.println("Processing " + file.getFilename());
++                            if (usesAttribute) {
++                                forig.put(endpoint.getFileAttributeName(), "done");
++                                endpoint.getFilesCollection().save(forig);
++                            }
++                            if (usesTimestamp) {
++                                if (file.getUploadDate().compareTo(fromDate) > 0) {
++                                    fromDate = file.getUploadDate();
++                                    dateModified = true;
++                                }
++                            }
++                        } catch (Exception e) {
++                            // TODO Auto-generated catch block
++                            e.printStackTrace();
++                        }
++                    }
++                }
++                if (persistsTimestamp && dateModified) {
++                    persistentTimestamp.put("timestamp", fromDate);
++                    ptsCollection.save(persistentTimestamp);
++                }
++                Thread.sleep(endpoint.getDelay());
++            }
++        } catch (Throwable e1) {
++            // TODO Auto-generated catch block
++            e1.printStackTrace();
++        }
++        if (c != null) {
++            c.close();
++        }
++    }
++    
++}

http://git-wip-us.apache.org/repos/asf/camel/blob/f1e21c53/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
----------------------------------------------------------------------
diff --cc components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
index 0000000,0000000..4856a7f
new file mode 100644
--- /dev/null
+++ b/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
@@@ -1,0 -1,0 +1,360 @@@
++/**
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *      http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++package org.apache.camel.component.gridfs;
++
++import com.mongodb.DB;
++import com.mongodb.DBCollection;
++import com.mongodb.Mongo;
++import com.mongodb.ReadPreference;
++import com.mongodb.WriteConcern;
++import com.mongodb.gridfs.GridFS;
++import org.apache.camel.Consumer;
++import org.apache.camel.Processor;
++import org.apache.camel.Producer;
++import org.apache.camel.impl.DefaultEndpoint;
++import org.apache.camel.spi.Metadata;
++import org.apache.camel.spi.UriEndpoint;
++import org.apache.camel.spi.UriParam;
++import org.apache.camel.spi.UriPath;
++import org.slf4j.Logger;
++import org.slf4j.LoggerFactory;
++
++@UriEndpoint(scheme = "gridfs", title = "MongoDBGridFS", syntax = "gridfs:connectionBean", 
++            label = "database,nosql")
++public class GridFsEndpoint extends DefaultEndpoint {
++    
++    public enum QueryStrategy {
++        TimeStamp,
++        PersistentTimestamp,
++        FileAttribute,
++        TimeStampAndFileAttribute,
++        PersistentTimestampAndFileAttribute
++    };
++    public static final String GRIDFS_OPERATION = "gridfs.operation";
++    public static final String GRIDFS_METADATA = "gridfs.metadata";
++    public static final String GRIDFS_CHUNKSIZE = "gridfs.chunksize";
++
++    private static final Logger LOG = LoggerFactory.getLogger(GridFsEndpoint.class);
++
++    @UriPath @Metadata(required = "true")
++    private String connectionBean;
++    @UriParam @Metadata(required = "true")
++    private String database;
++    @UriParam(defaultValue = GridFS.DEFAULT_BUCKET)
++    private String bucket;
++    @UriParam(enums = "ACKNOWLEDGED,W1,W2,W3,UNACKNOWLEDGED,JOURNALED,MAJORITY,SAFE")
++    private WriteConcern writeConcern;
++    @UriParam
++    private WriteConcern writeConcernRef;
++    @UriParam
++    private ReadPreference readPreference;
++    
++    @UriParam(label = "producer")
++    private String operation;
++
++    @UriParam(label = "consumer")
++    private String query;
++    @UriParam(label = "consumer", defaultValue = "1000")
++    private long initialDelay = 1000;
++    @UriParam(label = "consumer", defaultValue = "500")
++    private long delay = 500;
++    
++    @UriParam(label = "consumer", defaultValue = "TimeStamp")
++    private QueryStrategy queryStrategy = QueryStrategy.TimeStamp;
++    @UriParam(label = "consumer", defaultValue = "camel-timestamps")
++    private String persistentTSCollection = "camel-timestamps";
++    @UriParam(label = "consumer", defaultValue = "camel-timestamp")
++    private String persistentTSObject = "camel-timestamp";
++    @UriParam(label = "consumer", defaultValue = "camel-processed")
++    private String fileAttributeName = "camel-processed";
++
++
++    private Mongo mongoConnection;
++    private DB db;
++    private GridFS gridFs;
++    private DBCollection filesCollection;
++
++    public GridFsEndpoint(String uri, GridFsComponent component) {
++        super(uri, component);
++    }
++
++    @Override
++    public Producer createProducer() throws Exception {
++        initializeConnection();
++        return new GridFsProducer(this);
++    }
++
++    @Override
++    public Consumer createConsumer(Processor processor) throws Exception {
++        initializeConnection();
++        return new GridFsConsumer(this, processor);
++    }
++
++    public boolean isSingleton() {
++        return true;
++    }
++
++    @SuppressWarnings("deprecation")
++    public void initializeConnection() throws Exception {
++        LOG.info("Initialize GridFS endpoint: {}", this.toString());
++        if (database == null) {
++            throw new IllegalStateException("Missing required endpoint configuration: database");
++        }
++        db = mongoConnection.getDB(database);
++        if (db == null) {
++            throw new IllegalStateException("Could not initialize GridFsComponent. Database " + database + " does not exist.");
++        }
++        gridFs = new GridFS(db, bucket == null ? GridFS.DEFAULT_BUCKET : bucket) {
++            {
++                filesCollection = getFilesCollection();
++            }
++        };
++    }
++
++    
++    @Override
++    protected void doStart() throws Exception {
++        if (writeConcern != null && writeConcernRef != null) {
++            String msg = "Cannot set both writeConcern and writeConcernRef at the same time. Respective values: " + writeConcern
++                    + ", " + writeConcernRef + ". Aborting initialization.";
++            throw new IllegalArgumentException(msg);
++        }
++
++        setWriteReadOptionsOnConnection();
++        super.doStart();
++    }
++    private void setWriteReadOptionsOnConnection() {
++        // Set the WriteConcern
++        if (writeConcern != null) {
++            mongoConnection.setWriteConcern(writeConcern);
++        } else if (writeConcernRef != null) {
++            mongoConnection.setWriteConcern(writeConcernRef);
++        }
++
++        // Set the ReadPreference
++        if (readPreference != null) {
++            mongoConnection.setReadPreference(readPreference);
++        }
++    }
++    
++    
++    
++    
++    // ======= Getters and setters ===============================================
++    public String getConnectionBean() {
++        return connectionBean;
++    }
++    /**
++     * Name of {@link com.mongodb.Mongo} to use.
++     */
++    public void setConnectionBean(String connectionBean) {
++        this.connectionBean = connectionBean;
++    }
++    
++    public Mongo getMongoConnection() {
++        return mongoConnection;
++    }
++    /**
++     * Sets the Mongo instance that represents the backing connection
++     * 
++     * @param mongoConnection the connection to the database
++     */
++    public void setMongoConnection(Mongo mongoConnection) {
++        this.mongoConnection = mongoConnection;
++    }
++
++    public DB getDB() {
++        return db;
++    }
++    
++    public String getDatabase() {
++        return database;
++    }
++    /**
++     * Sets the name of the MongoDB database to target
++     * 
++     * @param database name of the MongoDB database
++     */
++    public void setDatabase(String database) {
++        this.database = database;
++    }
++    /**
++     * Sets the name of the GridFS bucket within the database.   Default is "fs".
++     * 
++     * @param database name of the MongoDB database
++     */
++    public String getBucket() {
++        return bucket;
++    }
++    public void setBucket(String bucket) {
++        this.bucket = bucket;
++    }
++    
++    public String getQuery() {
++        return query;
++    }
++    /**
++     * Additional query parameters (in JSON) that are used to configure the query used for finding
++     * files in the GridFsConsumer
++     * @param query
++     */
++    public void setQuery(String query) {
++        this.query = query;
++    }
++    public long getDelay() {
++        return delay;
++    }
++    /**
++     * Sets the delay between polls within the Consumer.  Default is 500ms
++     * @param delay
++     */
++    public void setDelay(long delay) {
++        this.delay = delay;
++    }
++    public long getInitialDelay() {
++        return initialDelay;
++    }
++    /**
++     * Sets the initialDelay before the consumer will start polling.  Default is 1000ms
++     * @param initialDelay
++     */
++    public void setInitialDelay(long initialDelay) {
++        this.initialDelay = delay;
++    }
++    
++    /**
++     * Sets the QueryStrategy that is used for polling for new files.  Default is Timestamp
++     * @see QueryStrategy
++     * @param s
++     */
++    public void setQueryStrategy(String s) {
++        queryStrategy = QueryStrategy.valueOf(s);
++    }
++    public QueryStrategy getQueryStrategy() {
++        return queryStrategy;
++    }
++    /**
++     * If the QueryType uses a persistent timestamp, this sets the name of the collection within
++     * the DB to store the timestamp.
++     * @param s
++     */
++    public void setPersistentTSCollection(String s) {
++        persistentTSCollection = s;
++    }
++    public String getPersistentTSCollection() {
++        return persistentTSCollection;
++    }
++    /**
++     * If the QueryType uses a persistent timestamp, this is the ID of the object in the collection
++     * to store the timestamp.   
++     * @param s
++     */
++    public void setPersistentTSObject(String id) {
++        persistentTSObject = id;
++    }
++    public String getPersistentTSObject() {
++        return persistentTSObject;
++    }
++    
++    /**
++     * If the QueryType uses a FileAttribute, this sets the name of the attribute that is used. Default is "camel-processed".
++     * @param f
++     */
++    public void setFileAttributeName(String f) {
++        fileAttributeName = f;
++    }
++    public String getFileAttributeName() {
++        return fileAttributeName;
++    }   
++    
++    /**
++     * Set the {@link WriteConcern} for write operations on MongoDB using the standard ones.
++     * Resolved from the fields of the WriteConcern class by calling the {@link WriteConcern#valueOf(String)} method.
++     * 
++     * @param writeConcern the standard name of the WriteConcern
++     * @see <a href="http://api.mongodb.org/java/current/com/mongodb/WriteConcern.html#valueOf(java.lang.String)">possible options</a>
++     */
++    public void setWriteConcern(String writeConcern) {
++        this.writeConcern = WriteConcern.valueOf(writeConcern);
++    }
++
++    public WriteConcern getWriteConcern() {
++        return writeConcern;
++    }
++
++    /**
++     * Set the {@link WriteConcern} for write operations on MongoDB, passing in the bean ref to a custom WriteConcern which exists in the Registry.
++     * You can also use standard WriteConcerns by passing in their key. See the {@link #setWriteConcern(String) setWriteConcern} method.
++     * 
++     * @param writeConcernRef the name of the bean in the registry that represents the WriteConcern to use
++     */
++    public void setWriteConcernRef(String writeConcernRef) {
++        WriteConcern wc = this.getCamelContext().getRegistry().lookupByNameAndType(writeConcernRef, WriteConcern.class);
++        if (wc == null) {
++            String msg = "Camel MongoDB component could not find the WriteConcern in the Registry. Verify that the "
++                    + "provided bean name (" + writeConcernRef + ")  is correct. Aborting initialization.";
++            throw new IllegalArgumentException(msg);
++        }
++
++        this.writeConcernRef = wc;
++    }
++
++    public WriteConcern getWriteConcernRef() {
++        return writeConcernRef;
++    }
++
++    /** 
++     * Sets a MongoDB {@link ReadPreference} on the Mongo connection. Read preferences set directly on the connection will be
++     * overridden by this setting.
++     * <p/>
++     * The {@link com.mongodb.ReadPreference#valueOf(String)} utility method is used to resolve the passed {@code readPreference}
++     * value. Some examples for the possible values are {@code nearest}, {@code primary} or {@code secondary} etc.
++     * 
++     * @param readPreference the name of the read preference to set
++     */
++    public void setReadPreference(String readPreference) {
++        this.readPreference = ReadPreference.valueOf(readPreference);
++    }
++
++    public ReadPreference getReadPreference() {
++        return readPreference;
++    }
++    
++    
++    /**
++     * Sets the operation this endpoint will execute against GridRS.
++     */
++    public void setOperation(String operation) {
++        this.operation = operation;
++    }
++
++    public String getOperation() {
++        return operation;
++    }
++
++    public GridFS getGridFs() {
++        return gridFs;
++    }
++
++    public void setGridFs(GridFS gridFs) {
++        this.gridFs = gridFs;
++    }
++    public DBCollection getFilesCollection() {
++        return filesCollection;
++    }
++
++}

http://git-wip-us.apache.org/repos/asf/camel/blob/f1e21c53/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
----------------------------------------------------------------------
diff --cc components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
index 0000000,0000000..58416b1
new file mode 100644
--- /dev/null
+++ b/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
@@@ -1,0 -1,0 +1,145 @@@
++/**
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *      http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++package org.apache.camel.component.gridfs;
++
++import java.io.FileNotFoundException;
++import java.io.IOException;
++import java.io.InputStream;
++import java.io.Reader;
++
++import com.mongodb.BasicDBObject;
++import com.mongodb.DBCursor;
++import com.mongodb.DBObject;
++import com.mongodb.gridfs.GridFSDBFile;
++import com.mongodb.gridfs.GridFSInputFile;
++import com.mongodb.util.JSON;
++import org.apache.camel.Exchange;
++import org.apache.camel.impl.DefaultProducer;
++
++
++/**
++ * Producer for the camel-gridfs component. Executes the operation configured
++ * on the {@link GridFsEndpoint} -- or supplied via the
++ * {@code GridFsEndpoint.GRIDFS_OPERATION} header -- against MongoDB GridFS.
++ */
++public class GridFsProducer extends DefaultProducer {    
++    private GridFsEndpoint endpoint;
++
++    public GridFsProducer(GridFsEndpoint endpoint) {
++        super(endpoint);
++        this.endpoint = endpoint;
++    }
++
++    /**
++     * Dispatches on the operation name: create (the default when none is
++     * given), remove, findOne, listAll or count.
++     * NOTE(review): an unrecognized operation name matches no branch, so the
++     * exchange passes through unchanged -- consider failing fast instead.
++     */
++    public void process(Exchange exchange) throws Exception {
++        String operation = endpoint.getOperation();
++        if (operation == null) {
++            operation = exchange.getIn().getHeader(GridFsEndpoint.GRIDFS_OPERATION, String.class);
++        }
++        if (operation == null || "create".equals(operation)) {
++            // Store the message body as a new GridFS file named by the
++            // Exchange.FILE_NAME header.
++            final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
++            Long chunkSize = exchange.getIn().getHeader(GridFsEndpoint.GRIDFS_CHUNKSIZE, Long.class);
++
++            InputStream ins = exchange.getIn().getMandatoryBody(InputStream.class);
++            GridFSInputFile gfsFile = endpoint.getGridFs().createFile(ins, filename, true);
++            if (chunkSize != null && chunkSize > 0) {
++                gfsFile.setChunkSize(chunkSize);
++            }
++            final String ct = exchange.getIn().getHeader(Exchange.CONTENT_TYPE, String.class);
++            if (ct != null) {
++                gfsFile.setContentType(ct);
++            }
++            // Optional JSON metadata header. NOTE(review): when the header is
++            // absent this relies on JSON.parse(null) not throwing -- confirm
++            // for the driver version in use.
++            String metaData = exchange.getIn().getHeader(GridFsEndpoint.GRIDFS_METADATA, String.class);
++            DBObject dbObject = (DBObject) JSON.parse(metaData);
++            gfsFile.setMetaData(dbObject);
++            gfsFile.save();
++            exchange.getIn().setHeader(Exchange.FILE_NAME_PRODUCED, gfsFile.getFilename());
++        } else if ("remove".equals(operation)) {
++            // Delete the GridFS file(s) matching the given filename.
++            final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
++            endpoint.getGridFs().remove(filename);
++        } else if ("findOne".equals(operation)) {
++            // Fetch one file by name: its content stream becomes the body and
++            // its metadata is exposed through message headers.
++            final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
++            GridFSDBFile file = endpoint.getGridFs().findOne(filename);
++            if (file != null) {
++                exchange.getIn().setHeader(GridFsEndpoint.GRIDFS_METADATA, JSON.serialize(file.getMetaData()));
++                exchange.getIn().setHeader(Exchange.FILE_CONTENT_TYPE, file.getContentType());
++                exchange.getIn().setHeader(Exchange.FILE_LENGTH, file.getLength());
++                exchange.getIn().setHeader(Exchange.FILE_LAST_MODIFIED, file.getUploadDate());
++                exchange.getIn().setBody(file.getInputStream(), InputStream.class);                
++            } else {
++                throw new FileNotFoundException("No GridFS file for " + filename);
++            }
++        } else if ("listAll".equals(operation)) {
++            // Stream the matching filenames (one per line) as a Reader;
++            // closing the Reader closes the underlying cursor.
++            final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
++            DBCursor cursor;
++            if (filename == null) {
++                cursor = endpoint.getGridFs().getFileList();
++            } else {
++                cursor = endpoint.getGridFs().getFileList(new BasicDBObject("filename", filename));
++            }
++            exchange.getIn().setBody(new DBCursorFilenameReader(cursor), Reader.class);
++        } else if ("count".equals(operation)) {
++            // Count files, optionally restricted to a single filename.
++            // NOTE(review): this cursor is never closed -- potential
++            // resource leak.
++            final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
++            DBCursor cursor;
++            if (filename == null) {
++                cursor = endpoint.getGridFs().getFileList();
++            } else {
++                cursor = endpoint.getGridFs().getFileList(new BasicDBObject("filename", filename));
++            }
++            exchange.getIn().setBody(cursor.count(), Integer.class);
++        } 
++        
++    }
++
++    
++    /**
++     * Reader that lazily drains a DBCursor of GridFS file documents, emitting
++     * one filename per line. NOTE(review): could be a static nested class --
++     * it does not reference the enclosing producer instance.
++     */
++    private class DBCursorFilenameReader extends Reader {
++        DBCursor cursor;
++        StringBuilder current;  // buffered, not-yet-consumed output
++        int pos;                // read position within 'current'
++        
++        DBCursorFilenameReader(DBCursor c) {
++            cursor = c;
++            current = new StringBuilder(4096);
++            pos = 0;
++            fill();
++        }
++        // Discard already-consumed chars, then pull more filenames from the
++        // cursor until ~4000 chars are buffered or the cursor is exhausted.
++        void fill() {
++            if (pos > 0) {
++                current.delete(0, pos);
++                pos = 0;
++            }
++            while (cursor.hasNext() && current.length() < 4000) {
++                DBObject o = cursor.next();
++                current.append(o.get("filename")).append("\n");
++            }
++        }
++        @Override
++        public int read(char[] cbuf, int off, int len) throws IOException {
++            if (pos == current.length()) {
++                fill();
++            }
++            if (pos == current.length()) {
++                // fill() produced nothing: cursor is exhausted -> end of stream.
++                return -1;
++            }
++            if (len > (current.length() - pos)) {
++                len = current.length() - pos;
++            }
++            current.getChars(pos, pos + len, cbuf, off);
++            pos += len;
++            return len;
++        }
++
++        @Override
++        public void close() throws IOException {
++            cursor.close();
++        }
++    }
++}

http://git-wip-us.apache.org/repos/asf/camel/blob/f1e21c53/components/camel-gridfs/src/main/resources/META-INF/LICENSE.txt
----------------------------------------------------------------------
diff --cc components/camel-gridfs/src/main/resources/META-INF/LICENSE.txt
index 0000000,0000000..6b0b127
new file mode 100644
--- /dev/null
+++ b/components/camel-gridfs/src/main/resources/META-INF/LICENSE.txt
@@@ -1,0 -1,0 +1,203 @@@
++
++                                 Apache License
++                           Version 2.0, January 2004
++                        http://www.apache.org/licenses/
++
++   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
++
++   1. Definitions.
++
++      "License" shall mean the terms and conditions for use, reproduction,
++      and distribution as defined by Sections 1 through 9 of this document.
++
++      "Licensor" shall mean the copyright owner or entity authorized by
++      the copyright owner that is granting the License.
++
++      "Legal Entity" shall mean the union of the acting entity and all
++      other entities that control, are controlled by, or are under common
++      control with that entity. For the purposes of this definition,
++      "control" means (i) the power, direct or indirect, to cause the
++      direction or management of such entity, whether by contract or
++      otherwise, or (ii) ownership of fifty percent (50%) or more of the
++      outstanding shares, or (iii) beneficial ownership of such entity.
++
++      "You" (or "Your") shall mean an individual or Legal Entity
++      exercising permissions granted by this License.
++
++      "Source" form shall mean the preferred form for making modifications,
++      including but not limited to software source code, documentation
++      source, and configuration files.
++
++      "Object" form shall mean any form resulting from mechanical
++      transformation or translation of a Source form, including but
++      not limited to compiled object code, generated documentation,
++      and conversions to other media types.
++
++      "Work" shall mean the work of authorship, whether in Source or
++      Object form, made available under the License, as indicated by a
++      copyright notice that is included in or attached to the work
++      (an example is provided in the Appendix below).
++
++      "Derivative Works" shall mean any work, whether in Source or Object
++      form, that is based on (or derived from) the Work and for which the
++      editorial revisions, annotations, elaborations, or other modifications
++      represent, as a whole, an original work of authorship. For the purposes
++      of this License, Derivative Works shall not include works that remain
++      separable from, or merely link (or bind by name) to the interfaces of,
++      the Work and Derivative Works thereof.
++
++      "Contribution" shall mean any work of authorship, including
++      the original version of the Work and any modifications or additions
++      to that Work or Derivative Works thereof, that is intentionally
++      submitted to Licensor for inclusion in the Work by the copyright owner
++      or by an individual or Legal Entity authorized to submit on behalf of
++      the copyright owner. For the purposes of this definition, "submitted"
++      means any form of electronic, verbal, or written communication sent
++      to the Licensor or its representatives, including but not limited to
++      communication on electronic mailing lists, source code control systems,
++      and issue tracking systems that are managed by, or on behalf of, the
++      Licensor for the purpose of discussing and improving the Work, but
++      excluding communication that is conspicuously marked or otherwise
++      designated in writing by the copyright owner as "Not a Contribution."
++
++      "Contributor" shall mean Licensor and any individual or Legal Entity
++      on behalf of whom a Contribution has been received by Licensor and
++      subsequently incorporated within the Work.
++
++   2. Grant of Copyright License. Subject to the terms and conditions of
++      this License, each Contributor hereby grants to You a perpetual,
++      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
++      copyright license to reproduce, prepare Derivative Works of,
++      publicly display, publicly perform, sublicense, and distribute the
++      Work and such Derivative Works in Source or Object form.
++
++   3. Grant of Patent License. Subject to the terms and conditions of
++      this License, each Contributor hereby grants to You a perpetual,
++      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
++      (except as stated in this section) patent license to make, have made,
++      use, offer to sell, sell, import, and otherwise transfer the Work,
++      where such license applies only to those patent claims licensable
++      by such Contributor that are necessarily infringed by their
++      Contribution(s) alone or by combination of their Contribution(s)
++      with the Work to which such Contribution(s) was submitted. If You
++      institute patent litigation against any entity (including a
++      cross-claim or counterclaim in a lawsuit) alleging that the Work
++      or a Contribution incorporated within the Work constitutes direct
++      or contributory patent infringement, then any patent licenses
++      granted to You under this License for that Work shall terminate
++      as of the date such litigation is filed.
++
++   4. Redistribution. You may reproduce and distribute copies of the
++      Work or Derivative Works thereof in any medium, with or without
++      modifications, and in Source or Object form, provided that You
++      meet the following conditions:
++
++      (a) You must give any other recipients of the Work or
++          Derivative Works a copy of this License; and
++
++      (b) You must cause any modified files to carry prominent notices
++          stating that You changed the files; and
++
++      (c) You must retain, in the Source form of any Derivative Works
++          that You distribute, all copyright, patent, trademark, and
++          attribution notices from the Source form of the Work,
++          excluding those notices that do not pertain to any part of
++          the Derivative Works; and
++
++      (d) If the Work includes a "NOTICE" text file as part of its
++          distribution, then any Derivative Works that You distribute must
++          include a readable copy of the attribution notices contained
++          within such NOTICE file, excluding those notices that do not
++          pertain to any part of the Derivative Works, in at least one
++          of the following places: within a NOTICE text file distributed
++          as part of the Derivative Works; within the Source form or
++          documentation, if provided along with the Derivative Works; or,
++          within a display generated by the Derivative Works, if and
++          wherever such third-party notices normally appear. The contents
++          of the NOTICE file are for informational purposes only and
++          do not modify the License. You may add Your own attribution
++          notices within Derivative Works that You distribute, alongside
++          or as an addendum to the NOTICE text from the Work, provided
++          that such additional attribution notices cannot be construed
++          as modifying the License.
++
++      You may add Your own copyright statement to Your modifications and
++      may provide additional or different license terms and conditions
++      for use, reproduction, or distribution of Your modifications, or
++      for any such Derivative Works as a whole, provided Your use,
++      reproduction, and distribution of the Work otherwise complies with
++      the conditions stated in this License.
++
++   5. Submission of Contributions. Unless You explicitly state otherwise,
++      any Contribution intentionally submitted for inclusion in the Work
++      by You to the Licensor shall be under the terms and conditions of
++      this License, without any additional terms or conditions.
++      Notwithstanding the above, nothing herein shall supersede or modify
++      the terms of any separate license agreement you may have executed
++      with Licensor regarding such Contributions.
++
++   6. Trademarks. This License does not grant permission to use the trade
++      names, trademarks, service marks, or product names of the Licensor,
++      except as required for reasonable and customary use in describing the
++      origin of the Work and reproducing the content of the NOTICE file.
++
++   7. Disclaimer of Warranty. Unless required by applicable law or
++      agreed to in writing, Licensor provides the Work (and each
++      Contributor provides its Contributions) on an "AS IS" BASIS,
++      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
++      implied, including, without limitation, any warranties or conditions
++      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
++      PARTICULAR PURPOSE. You are solely responsible for determining the
++      appropriateness of using or redistributing the Work and assume any
++      risks associated with Your exercise of permissions under this License.
++
++   8. Limitation of Liability. In no event and under no legal theory,
++      whether in tort (including negligence), contract, or otherwise,
++      unless required by applicable law (such as deliberate and grossly
++      negligent acts) or agreed to in writing, shall any Contributor be
++      liable to You for damages, including any direct, indirect, special,
++      incidental, or consequential damages of any character arising as a
++      result of this License or out of the use or inability to use the
++      Work (including but not limited to damages for loss of goodwill,
++      work stoppage, computer failure or malfunction, or any and all
++      other commercial damages or losses), even if such Contributor
++      has been advised of the possibility of such damages.
++
++   9. Accepting Warranty or Additional Liability. While redistributing
++      the Work or Derivative Works thereof, You may choose to offer,
++      and charge a fee for, acceptance of support, warranty, indemnity,
++      or other liability obligations and/or rights consistent with this
++      License. However, in accepting such obligations, You may act only
++      on Your own behalf and on Your sole responsibility, not on behalf
++      of any other Contributor, and only if You agree to indemnify,
++      defend, and hold each Contributor harmless for any liability
++      incurred by, or claims asserted against, such Contributor by reason
++      of your accepting any such warranty or additional liability.
++
++   END OF TERMS AND CONDITIONS
++
++   APPENDIX: How to apply the Apache License to your work.
++
++      To apply the Apache License to your work, attach the following
++      boilerplate notice, with the fields enclosed by brackets "[]"
++      replaced with your own identifying information. (Don't include
++      the brackets!)  The text should be enclosed in the appropriate
++      comment syntax for the file format. We also recommend that a
++      file or class name and description of purpose be included on the
++      same "printed page" as the copyright notice for easier
++      identification within third-party archives.
++
++   Copyright [yyyy] [name of copyright owner]
++
++   Licensed under the Apache License, Version 2.0 (the "License");
++   you may not use this file except in compliance with the License.
++   You may obtain a copy of the License at
++
++       http://www.apache.org/licenses/LICENSE-2.0
++
++   Unless required by applicable law or agreed to in writing, software
++   distributed under the License is distributed on an "AS IS" BASIS,
++   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++   See the License for the specific language governing permissions and
++   limitations under the License.
++

http://git-wip-us.apache.org/repos/asf/camel/blob/f1e21c53/components/camel-gridfs/src/main/resources/META-INF/NOTICE.txt
----------------------------------------------------------------------
diff --cc components/camel-gridfs/src/main/resources/META-INF/NOTICE.txt
index 0000000,0000000..2e215bf
new file mode 100644
--- /dev/null
+++ b/components/camel-gridfs/src/main/resources/META-INF/NOTICE.txt
@@@ -1,0 -1,0 +1,11 @@@
++   =========================================================================
++   ==  NOTICE file corresponding to the section 4 d of                    ==
++   ==  the Apache License, Version 2.0,                                   ==
++   ==  in this case for the Apache Camel distribution.                    ==
++   =========================================================================
++
++   This product includes software developed by
++   The Apache Software Foundation (http://www.apache.org/).
++
++   Please read the different LICENSE files present in the licenses directory of
++   this distribution.

http://git-wip-us.apache.org/repos/asf/camel/blob/f1e21c53/components/camel-gridfs/src/main/resources/META-INF/services/org/apache/camel/component/gridfs
----------------------------------------------------------------------
diff --cc components/camel-gridfs/src/main/resources/META-INF/services/org/apache/camel/component/gridfs
index 0000000,0000000..50df682
new file mode 100644
--- /dev/null
+++ b/components/camel-gridfs/src/main/resources/META-INF/services/org/apache/camel/component/gridfs
@@@ -1,0 -1,0 +1,18 @@@
++#
++# Licensed to the Apache Software Foundation (ASF) under one or more
++# contributor license agreements.  See the NOTICE file distributed with
++# this work for additional information regarding copyright ownership.
++# The ASF licenses this file to You under the Apache License, Version 2.0
++# (the "License"); you may not use this file except in compliance with
++# the License.  You may obtain a copy of the License at
++#
++# http://www.apache.org/licenses/LICENSE-2.0
++#
++# Unless required by applicable law or agreed to in writing, software
++# distributed under the License is distributed on an "AS IS" BASIS,
++# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++# See the License for the specific language governing permissions and
++# limitations under the License.
++#
++
++class=org.apache.camel.component.gridfs.GridFsComponent

http://git-wip-us.apache.org/repos/asf/camel/blob/f1e21c53/components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/AbstractMongoDbTest.java
----------------------------------------------------------------------
diff --cc components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/AbstractMongoDbTest.java
index 0000000,0000000..f4c2bff
new file mode 100644
--- /dev/null
+++ b/components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/AbstractMongoDbTest.java
@@@ -1,0 -1,0 +1,62 @@@
++/**
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *      http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++package org.apache.camel.component.gridfs;
++
++
++import com.mongodb.MongoClient;
++import com.mongodb.gridfs.GridFS;
++
++import org.apache.camel.CamelContext;
++import org.apache.camel.component.properties.PropertiesComponent;
++import org.apache.camel.spring.SpringCamelContext;
++import org.apache.camel.test.junit4.CamelTestSupport;
++import org.springframework.context.ApplicationContext;
++import org.springframework.context.annotation.AnnotationConfigApplicationContext;
++
++/**
++ * Base class for camel-gridfs tests. Boots a Spring
++ * {@link AnnotationConfigApplicationContext} configured by
++ * {@link EmbedMongoConfiguration} and exposes the {@link MongoClient} and a
++ * per-test-class {@link GridFS} bucket to subclasses.
++ */
++public abstract class AbstractMongoDbTest extends CamelTestSupport {
++
++    protected MongoClient mongo;
++    protected GridFS gridfs;
++
++    protected ApplicationContext applicationContext;
++
++    @SuppressWarnings("deprecation")
++    @Override
++    public void doPostSetup() {
++        // Resolve the client from the Spring context and open a GridFS bucket
++        // named after the concrete test class (see getBucket()).
++        mongo = applicationContext.getBean(MongoClient.class);
++        gridfs = new GridFS(mongo.getDB("test"), getBucket());
++    }
++
++    /**
++     * Bucket name used by the test; the simple class name keeps each test
++     * class isolated in its own GridFS bucket.
++     */
++    public String getBucket() {
++        return this.getClass().getSimpleName();
++    }
++    
++    @Override
++    public void tearDown() throws Exception {
++        super.tearDown();
++        mongo.close();
++    }
++
++    @Override
++    protected CamelContext createCamelContext() throws Exception {
++        // Build a Spring-backed CamelContext and register the properties
++        // component so routes can resolve {{mongodb.testDb}} placeholders.
++        applicationContext = new AnnotationConfigApplicationContext(EmbedMongoConfiguration.class);
++        CamelContext ctx = new SpringCamelContext(applicationContext);
++        PropertiesComponent pc = new PropertiesComponent("classpath:mongodb.test.properties");
++        ctx.addComponent("properties", pc);
++        return ctx;
++    }
++}

http://git-wip-us.apache.org/repos/asf/camel/blob/f1e21c53/components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/EmbedMongoConfiguration.java
----------------------------------------------------------------------
diff --cc components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/EmbedMongoConfiguration.java
index 0000000,0000000..d755a45
new file mode 100644
--- /dev/null
+++ b/components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/EmbedMongoConfiguration.java
@@@ -1,0 -1,0 +1,58 @@@
++/**
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *      http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++package org.apache.camel.component.gridfs;
++
++import java.io.IOException;
++import java.net.UnknownHostException;
++
++import com.mongodb.MongoClient;
++import de.flapdoodle.embed.mongo.MongodExecutable;
++import de.flapdoodle.embed.mongo.MongodStarter;
++import de.flapdoodle.embed.mongo.config.IMongodConfig;
++import de.flapdoodle.embed.mongo.config.MongodConfigBuilder;
++import de.flapdoodle.embed.mongo.config.Net;
++import org.springframework.context.annotation.Bean;
++import org.springframework.context.annotation.Configuration;
++
++import static de.flapdoodle.embed.mongo.distribution.Version.Main.PRODUCTION;
++import static de.flapdoodle.embed.process.runtime.Network.localhostIsIPv6;
++import static org.springframework.util.SocketUtils.findAvailableTcpPort;
++
++/**
++ * Spring configuration that starts an embedded MongoDB (flapdoodle) on a
++ * random free port and exposes a {@link MongoClient} bean connected to it.
++ */
++@Configuration
++public class EmbedMongoConfiguration {
++
++    private static final int PORT = findAvailableTcpPort();
++
++    static {
++        // Start mongod once per JVM; all tests share this single instance.
++        // NOTE(review): the MongodExecutable is never stopped, so the process
++        // lives until the JVM exits -- acceptable for tests, but worth
++        // confirming.
++        try {
++            IMongodConfig mongodConfig = new MongodConfigBuilder()
++                    .version(PRODUCTION)
++                    .net(new Net(PORT, localhostIsIPv6()))
++                    .build();
++            MongodExecutable mongodExecutable = MongodStarter.getDefaultInstance().prepare(mongodConfig);
++            mongodExecutable.start();
++        } catch (IOException e) {
++            throw new RuntimeException(e);
++        }
++    }
++
++    /**
++     * Client connected to the embedded mongod.
++     * NOTE(review): "0.0.0.0" is a bind-all address rather than a
++     * conventional connect address -- "localhost" is probably intended;
++     * verify.
++     */
++    @Bean
++    public MongoClient myDb() throws UnknownHostException {
++        return new MongoClient("0.0.0.0", PORT);
++    }
++
++}

http://git-wip-us.apache.org/repos/asf/camel/blob/f1e21c53/components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java
----------------------------------------------------------------------
diff --cc components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java
index 0000000,0000000..77b1c6e
new file mode 100644
--- /dev/null
+++ b/components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java
@@@ -1,0 -1,0 +1,101 @@@
++/**
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements. See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership. The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License. You may obtain a copy of the License at
++ *
++ * http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied. See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++
++package org.apache.camel.component.gridfs;
++
++import java.util.HashMap;
++import java.util.Map;
++
++import com.mongodb.gridfs.GridFS;
++
++import org.apache.camel.Exchange;
++import org.apache.camel.builder.RouteBuilder;
++import org.apache.camel.component.mock.MockEndpoint;
++
++import org.junit.Test;
++
++/**
++ * Consumer tests covering the three queryStrategy values used in the routes
++ * below: the default, FileAttribute and PersistentTimestamp -- each reading
++ * from its own bucket.
++ */
++public class GridFsConsumerTest extends AbstractMongoDbTest {
++    @Override
++    protected RouteBuilder createRouteBuilder() throws Exception {
++        return new RouteBuilder() {
++            public void configure() {
++                // One producer route per bucket, and one consumer route per
++                // query strategy reading from the matching bucket.
++                from("direct:create").to("gridfs:myDb?database={{mongodb.testDb}}&operation=create&bucket=" + getBucket());
++                from("direct:create-a").to("gridfs:myDb?database={{mongodb.testDb}}&operation=create&bucket=" + getBucket() + "-a");
++                from("direct:create-pts").to("gridfs:myDb?database={{mongodb.testDb}}&operation=create&bucket=" + getBucket() + "-pts");
++                
++                from("gridfs:myDb?database={{mongodb.testDb}}&bucket=" + getBucket()).convertBodyTo(String.class).to("mock:test");
++                from("gridfs:myDb?database={{mongodb.testDb}}&bucket=" + getBucket() + "-a&queryStrategy=FileAttribute")
++                    .convertBodyTo(String.class).to("mock:test");
++                from("gridfs:myDb?database={{mongodb.testDb}}&bucket=" + getBucket() + "-pts&queryStrategy=PersistentTimestamp")
++                    .convertBodyTo(String.class).to("mock:test");
++            }
++        };
++    }
++    
++    
++    @Test
++    public void testTimestamp() throws Exception {
++        runTest("direct:create", gridfs);
++    }
++    @Test
++    @SuppressWarnings("deprecation")
++    public void testAttribute() throws Exception {
++        runTest("direct:create-a", new GridFS(mongo.getDB("test"), getBucket() + "-a"));
++    }
++    
++    @Test
++    @SuppressWarnings("deprecation")
++    public void testPersistentTS() throws Exception {
++        runTest("direct:create-pts", new GridFS(mongo.getDB("test"), getBucket() + "-pts"));
++    }
++    
++    /**
++     * Writes one file via the given producer route and expects the consumer
++     * to deliver it, then writes three more and expects three deliveries.
++     * NOTE(review): the trailing sleep plus re-assert appears intended to
++     * catch duplicate deliveries arriving late -- confirm.
++     */
++    public void runTest(String target, GridFS gridfs) throws Exception {
++        MockEndpoint mock = getMockEndpoint("mock:test");
++        String data = "This is some stuff to go into the db";
++        mock.expectedMessageCount(1);
++        mock.expectedBodiesReceived(data);
++        
++        Map<String, Object> headers = new HashMap<String, Object>();
++        String fn = "filename.for.db.txt";
++        // Sanity check: the bucket starts out without the test file.
++        assertEquals(0, gridfs.find(fn).size());
++        
++        headers.put(Exchange.FILE_NAME, fn);
++        template.requestBodyAndHeaders(target, data, headers);
++        
++        mock.assertIsSatisfied();
++        mock.reset();
++        
++        mock.expectedMessageCount(3);
++        mock.expectedBodiesReceived(data, data, data);
++        
++        headers.put(Exchange.FILE_NAME, fn + "_1");
++        template.requestBodyAndHeaders(target, data, headers);
++        headers.put(Exchange.FILE_NAME, fn + "_2");
++        template.requestBodyAndHeaders(target, data, headers);
++        headers.put(Exchange.FILE_NAME, fn + "_3");
++        template.requestBodyAndHeaders(target, data, headers);
++        mock.assertIsSatisfied();
++        Thread.sleep(1000);
++        mock.assertIsSatisfied();
++    }
++
++}

http://git-wip-us.apache.org/repos/asf/camel/blob/f1e21c53/components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/GridFsProducerOperationsTest.java
----------------------------------------------------------------------
diff --cc components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/GridFsProducerOperationsTest.java
index 0000000,0000000..df7882d
new file mode 100644
--- /dev/null
+++ b/components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/GridFsProducerOperationsTest.java
@@@ -1,0 -1,0 +1,74 @@@
++/**
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *      http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++package org.apache.camel.component.gridfs;
++
++import java.io.InputStream;
++import java.util.HashMap;
++import java.util.Map;
++
++import org.apache.camel.Exchange;
++import org.apache.camel.builder.RouteBuilder;
++import org.junit.Test;
++
++/**
++ * Exercises the GridFS producer operations (create, count, findOne, listAll,
++ * remove) end-to-end against the embedded MongoDB started by AbstractMongoDbTest.
++ */
++public class GridFsProducerOperationsTest extends AbstractMongoDbTest {
++    
++    @Override
++    protected RouteBuilder createRouteBuilder() throws Exception {
++        return new RouteBuilder() {
++            public void configure() {
++                from("direct:create").to("gridfs:myDb?database={{mongodb.testDb}}&operation=create&bucket=" + getBucket());
++                from("direct:remove").to("gridfs:myDb?database={{mongodb.testDb}}&operation=remove&bucket=" + getBucket());
++                from("direct:findOne").to("gridfs:myDb?database={{mongodb.testDb}}&operation=findOne&bucket=" + getBucket());
++                from("direct:listAll").to("gridfs:myDb?database={{mongodb.testDb}}&operation=listAll&bucket=" + getBucket());
++                from("direct:count").to("gridfs:myDb?database={{mongodb.testDb}}&operation=count&bucket=" + getBucket());
++                // No operation parameter: the producer falls back to the header-driven default.
++                from("direct:headerOp").to("gridfs:myDb?database={{mongodb.testDb}}&bucket=" + getBucket());
++            }
++        };
++    }
++    
++    @Test
++    public void testOperations() throws Exception {
++        Map<String, Object> headers = new HashMap<String, Object>();
++        String fn = "filename.for.db.txt";
++        assertEquals(0, gridfs.find(fn).size());
++        
++        headers.put(Exchange.FILE_NAME, fn);
++        String data = "This is some stuff to go into the db";
++        template.requestBodyAndHeaders("direct:create", data, headers);
++        assertEquals(1, gridfs.find(fn).size());
++        assertEquals(1, template.requestBodyAndHeaders("direct:count", null, headers));
++        InputStream ins = template.requestBodyAndHeaders("direct:findOne", null, headers, InputStream.class);
++        assertNotNull(ins);
++        try {
++            // A single InputStream.read() is not guaranteed to return the whole
++            // file, so drain the stream until EOF (or the buffer is full).
++            byte[] b = new byte[2048];
++            int total = 0;
++            int r;
++            while (total < b.length && (r = ins.read(b, total, b.length - total)) != -1) {
++                total += r;
++            }
++            assertEquals(data, new String(b, 0, total, "utf-8"));
++        } finally {
++            ins.close();
++        }
++        
++        headers.put(Exchange.FILE_NAME, "2-" + fn);
++        
++        template.requestBodyAndHeaders("direct:create", data + "data2", headers);
++        assertEquals(1, template.requestBodyAndHeaders("direct:count", null, headers));
++        assertEquals(2, template.requestBody("direct:count", null, Integer.class).intValue());
++        
++        String s = template.requestBody("direct:listAll", null, String.class);
++        assertTrue(s.contains("2-" + fn));
++        template.requestBodyAndHeaders("direct:remove", null, headers);
++        assertEquals(1, template.requestBody("direct:count", null, Integer.class).intValue());
++        s = template.requestBody("direct:listAll", null, String.class);
++        assertFalse(s.contains("2-" + fn));
++    }
++}
++

http://git-wip-us.apache.org/repos/asf/camel/blob/f1e21c53/components/camel-gridfs/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --cc components/camel-gridfs/src/test/resources/log4j.properties
index 0000000,0000000..cb64298
new file mode 100644
--- /dev/null
+++ b/components/camel-gridfs/src/test/resources/log4j.properties
@@@ -1,0 -1,0 +1,37 @@@
++#
++# Licensed to the Apache Software Foundation (ASF) under one or more
++# contributor license agreements.  See the NOTICE file distributed with
++# this work for additional information regarding copyright ownership.
++# The ASF licenses this file to You under the Apache License, Version 2.0
++# (the "License"); you may not use this file except in compliance with
++# the License.  You may obtain a copy of the License at
++#
++# http://www.apache.org/licenses/LICENSE-2.0
++#
++# Unless required by applicable law or agreed to in writing, software
++# distributed under the License is distributed on an "AS IS" BASIS,
++# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++# See the License for the specific language governing permissions and
++# limitations under the License.
++#
++
++log4j.rootLogger=INFO, file
++# change the logging level of this category to increase verbosity of the MongoDB component
++log4j.category.org.apache.camel.component.mongodb=INFO, file
++log4j.additivity.org.apache.camel.component.mongodb=false
++
++# uncomment the following line to turn on Camel debugging
++#log4j.logger.org.apache.camel=DEBUG
++
++# CONSOLE appender not used by default
++log4j.appender.out=org.apache.log4j.ConsoleAppender
++log4j.appender.out.layout=org.apache.log4j.PatternLayout
++log4j.appender.out.layout.ConversionPattern=[%30.30t] %-30.30c{1} %-5p %m%n
++#log4j.appender.out.layout.ConversionPattern=%d [%-15.15t] %-5p %-30.30c{1} - %m%n
++
++
++# File appender
++log4j.appender.file=org.apache.log4j.FileAppender
++log4j.appender.file.layout=org.apache.log4j.PatternLayout
++log4j.appender.file.layout.ConversionPattern=%d [%-15.15t] %-5p %-30.30c{1} - %m%n
++log4j.appender.file.file=target/camel-mongodb-test.log

http://git-wip-us.apache.org/repos/asf/camel/blob/f1e21c53/components/camel-gridfs/src/test/resources/mongodb.test.properties
----------------------------------------------------------------------
diff --cc components/camel-gridfs/src/test/resources/mongodb.test.properties
index 0000000,0000000..20c529d
new file mode 100644
--- /dev/null
+++ b/components/camel-gridfs/src/test/resources/mongodb.test.properties
@@@ -1,0 -1,0 +1,21 @@@
++#
++# Licensed to the Apache Software Foundation (ASF) under one or more
++# contributor license agreements.  See the NOTICE file distributed with
++# this work for additional information regarding copyright ownership.
++# The ASF licenses this file to You under the Apache License, Version 2.0
++# (the "License"); you may not use this file except in compliance with
++# the License.  You may obtain a copy of the License at
++#
++# http://www.apache.org/licenses/LICENSE-2.0
++#
++# Unless required by applicable law or agreed to in writing, software
++# distributed under the License is distributed on an "AS IS" BASIS,
++# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++# See the License for the specific language governing permissions and
++# limitations under the License.
++#
++
++mongodb.connectionURI=mongodb://localhost:27017
++mongodb.testDb=test
++mongodb.testCollection=camelTest
++mongodb.cappedTestCollection=camelTestCapped


[2/9] camel git commit: [CAMEL-9659] GridFs producer working with junit test

Posted by dk...@apache.org.
[CAMEL-9659] GridFs producer working with junit test


Project: http://git-wip-us.apache.org/repos/asf/camel/repo
Commit: http://git-wip-us.apache.org/repos/asf/camel/commit/bfaec784
Tree: http://git-wip-us.apache.org/repos/asf/camel/tree/bfaec784
Diff: http://git-wip-us.apache.org/repos/asf/camel/diff/bfaec784

Branch: refs/heads/master
Commit: bfaec78409732532b68aff932fcbe0441fbd6559
Parents: 4c78e44
Author: Daniel Kulp <dk...@apache.org>
Authored: Fri Feb 26 15:14:02 2016 -0500
Committer: Daniel Kulp <dk...@apache.org>
Committed: Tue Mar 1 14:22:48 2016 -0500

----------------------------------------------------------------------
 pom.xml                                         |  15 +-
 .../camel/component/gridfs/GridFsComponent.java |  14 +-
 .../camel/component/gridfs/GridFsConsumer.java  |  40 ++++
 .../camel/component/gridfs/GridFsEndpoint.java  | 185 ++++++++++++++-----
 .../camel/component/gridfs/GridFsProducer.java  | 153 ++++++++++-----
 .../component/gridfs/AbstractMongoDbTest.java   |  53 ++++++
 .../gridfs/EmbedMongoConfiguration.java         |  58 ++++++
 .../gridfs/GridFsConsumerOperationsTest.java    |  74 ++++++++
 src/test/resources/log4j.properties             |  37 ++++
 src/test/resources/mongodb.test.properties      |  21 +++
 10 files changed, 542 insertions(+), 108 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 5b1e1a1..ade40e7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -24,7 +24,7 @@
     <parent>
         <groupId>org.apache.camel</groupId>
         <artifactId>components</artifactId>
-        <version>2.13.2</version>
+        <version>2.16.2</version>
     </parent>
 
     <artifactId>camel-gridfs</artifactId>
@@ -52,9 +52,18 @@
         <dependency>
             <groupId>org.mongodb</groupId>
             <artifactId>mongo-java-driver</artifactId>
-            <version>${mongo-java-driver-version}</version>
+            <version>3.2.2</version>
+        </dependency>
+        <dependency>
+            <groupId>de.flapdoodle.embed</groupId>
+            <artifactId>de.flapdoodle.embed.mongo</artifactId>
+            <scope>test</scope>
+        </dependency>
+	<dependency>
+          <groupId>org.apache.camel</groupId>
+          <artifactId>camel-test-spring</artifactId>
+          <scope>test</scope>
         </dependency>
-
     </dependencies>
 
 </project>

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java b/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
index 87d5394..26da915 100644
--- a/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
@@ -18,29 +18,35 @@ package org.apache.camel.component.gridfs;
 
 import com.mongodb.Mongo;
 import org.apache.camel.Endpoint;
-import org.apache.camel.impl.DefaultComponent;
+import org.apache.camel.impl.UriEndpointComponent;
 import org.apache.camel.util.CamelContextHelper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Map;
 
-public class GridFsComponent extends DefaultComponent {
+public class GridFsComponent extends UriEndpointComponent {
 
     private final static Logger LOG = LoggerFactory.getLogger(GridFsComponent.class);
 
     private volatile Mongo db;
 
+    public GridFsComponent() {
+        super(GridFsEndpoint.class);
+    }
+    
     protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
         if (db == null) {
             db = CamelContextHelper.mandatoryLookup(getCamelContext(), remaining, Mongo.class);
             LOG.debug("Resolved the connection with the name {} as {}", remaining, db);
         }
 
-        Endpoint endpoint = new GridFsEndpoint(uri, this);
+        GridFsEndpoint endpoint = new GridFsEndpoint(uri, this);
         parameters.put("mongoConnection", db);
+        endpoint.setConnectionBean(remaining);
+        endpoint.setMongoConnection(db);
         setProperties(endpoint, parameters);
-
+        
         return endpoint;
     }
 

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java b/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
new file mode 100644
index 0000000..dce195a
--- /dev/null
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.camel.component.gridfs;
+
+import org.apache.camel.Processor;
+import org.apache.camel.impl.DefaultConsumer;
+
+/**
+ * Consumer side of the GridFS component.
+ * NOTE(review): this is a skeleton — it holds the endpoint reference and
+ * delegates all lifecycle handling to DefaultConsumer; no polling/receive
+ * logic is implemented yet.
+ */
+public class GridFsConsumer extends DefaultConsumer {
+    // Endpoint this consumer was created from; retained for access to its
+    // GridFS connection and configuration.
+    GridFsEndpoint ep;
+    
+    /**
+     * @param endpoint  the GridFS endpoint that created this consumer
+     * @param processor the processor each received exchange is handed to
+     */
+    public GridFsConsumer(GridFsEndpoint endpoint, Processor processor) {
+        super(endpoint, processor);
+        ep = endpoint;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java b/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
index d630160..cef109a 100644
--- a/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
@@ -17,70 +17,125 @@
 package org.apache.camel.component.gridfs;
 
 import com.mongodb.DB;
-import com.mongodb.DBCollection;
 import com.mongodb.Mongo;
+import com.mongodb.ReadPreference;
+import com.mongodb.WriteConcern;
 import com.mongodb.gridfs.GridFS;
 import org.apache.camel.Consumer;
 import org.apache.camel.Processor;
 import org.apache.camel.Producer;
 import org.apache.camel.impl.DefaultEndpoint;
+import org.apache.camel.spi.Metadata;
+import org.apache.camel.spi.UriEndpoint;
+import org.apache.camel.spi.UriParam;
+import org.apache.camel.spi.UriPath;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+@UriEndpoint(scheme = "gridfs", title = "MongoDBGridFS", syntax = "gridfs:connectionBean", 
+            label = "database,nosql")
 public class GridFsEndpoint extends DefaultEndpoint {
 
     private static final Logger LOG = LoggerFactory.getLogger(GridFsEndpoint.class);
 
-    private Mongo mongoConnection;
+    @UriPath @Metadata(required = "true")
+    private String connectionBean;
+    @UriParam
     private String database;
-    private String colCounters;
-    private String colTP;
-    private DBCollection dbColCounters;
-    private DBCollection dbColTP;
+    @UriParam
+    private String bucket;
+    @UriParam(enums = "ACKNOWLEDGED,W1,W2,W3,UNACKNOWLEDGED,JOURNALED,MAJORITY,SAFE")
+    private WriteConcern writeConcern;
+    @UriParam
+    private WriteConcern writeConcernRef;
+    @UriParam
+    private ReadPreference readPreference;
+    @UriParam
+    private String operation;
+
+    
+    private Mongo mongoConnection;
     private DB db;
     private GridFS gridFs;
 
-    public GridFsEndpoint() { }
-
     public GridFsEndpoint(String uri, GridFsComponent component) {
         super(uri, component);
     }
 
-    public GridFsEndpoint(String endpointUri) {
-        super(endpointUri);
-    }
-
+    @Override
     public Producer createProducer() throws Exception {
         initializeConnection();
         return new GridFsProducer(this);
     }
 
-    public Consumer createConsumer(Processor processor) {
-        return null;
+    @Override
+    public Consumer createConsumer(Processor processor) throws Exception {
+        initializeConnection();
+        return new GridFsConsumer(this, processor);
     }
 
     public boolean isSingleton() {
         return true;
     }
 
+    @SuppressWarnings("deprecation")
     public void initializeConnection() throws Exception {
         LOG.info("Initialize GridFS endpoint: {}", this.toString());
-        if (database == null || colCounters == null || colTP == null) {
-            throw new IllegalStateException("Missing required endpoint configuration: database and/or colCounters and/or colTP");
+        if (database == null) {
+            throw new IllegalStateException("Missing required endpoint configuration: database");
         }
         db = mongoConnection.getDB(database);
         if (db == null) {
             throw new IllegalStateException("Could not initialize GridFsComponent. Database " + database + " does not exist.");
         }
-        dbColCounters = db.getCollection(colCounters);
-        dbColTP = db.getCollection(colTP);
-        gridFs = new GridFS(db);
+        gridFs = new GridFS(db, bucket == null ? GridFS.DEFAULT_BUCKET : bucket);
     }
 
+    
+    @Override
+    protected void doStart() throws Exception {
+        if (writeConcern != null && writeConcernRef != null) {
+            String msg = "Cannot set both writeConcern and writeConcernRef at the same time. Respective values: " + writeConcern
+                    + ", " + writeConcernRef + ". Aborting initialization.";
+            throw new IllegalArgumentException(msg);
+        }
+
+        setWriteReadOptionsOnConnection();
+        super.doStart();
+    }
+    private void setWriteReadOptionsOnConnection() {
+        // Set the WriteConcern
+        if (writeConcern != null) {
+            mongoConnection.setWriteConcern(writeConcern);
+        } else if (writeConcernRef != null) {
+            mongoConnection.setWriteConcern(writeConcernRef);
+        }
+
+        // Set the ReadPreference
+        if (readPreference != null) {
+            mongoConnection.setReadPreference(readPreference);
+        }
+    }
+    
+    
+    
+    
+    // ======= Getters and setters ===============================================
+
+
+    public String getConnectionBean() {
+        return connectionBean;
+    }
+    /**
+     * Name of {@link com.mongodb.Mongo} to use.
+     */
+    public void setConnectionBean(String connectionBean) {
+        this.connectionBean = connectionBean;
+    }
+    
     public Mongo getMongoConnection() {
         return mongoConnection;
     }
-
     public void setMongoConnection(Mongo mongoConnection) {
         this.mongoConnection = mongoConnection;
     }
@@ -88,49 +143,79 @@ public class GridFsEndpoint extends DefaultEndpoint {
     public String getDatabase() {
         return database;
     }
-
     public void setDatabase(String database) {
         this.database = database;
     }
+    public String getBucket() {
+        return bucket;
+    }
+    public void setBucket(String bucket) {
+        this.bucket = bucket;
+    }
+    
+    /**
+     * Set the {@link WriteConcern} for write operations on MongoDB using the standard ones.
+     * Resolved from the fields of the WriteConcern class by calling the {@link WriteConcern#valueOf(String)} method.
+     * 
+     * @param writeConcern the standard name of the WriteConcern
+     * @see <a href="http://api.mongodb.org/java/current/com/mongodb/WriteConcern.html#valueOf(java.lang.String)">possible options</a>
+     */
+    public void setWriteConcern(String writeConcern) {
+        this.writeConcern = WriteConcern.valueOf(writeConcern);
+    }
+
+    public WriteConcern getWriteConcern() {
+        return writeConcern;
+    }
+
+    /**
+     * Set the {@link WriteConcern} for write operations on MongoDB, passing in the bean ref to a custom WriteConcern which exists in the Registry.
+     * You can also use standard WriteConcerns by passing in their key. See the {@link #setWriteConcern(String) setWriteConcern} method.
+     * 
+     * @param writeConcernRef the name of the bean in the registry that represents the WriteConcern to use
+     */
+    public void setWriteConcernRef(String writeConcernRef) {
+        WriteConcern wc = this.getCamelContext().getRegistry().lookupByNameAndType(writeConcernRef, WriteConcern.class);
+        if (wc == null) {
+            String msg = "Camel MongoDB component could not find the WriteConcern in the Registry. Verify that the "
+                    + "provided bean name (" + writeConcernRef + ")  is correct. Aborting initialization.";
+            throw new IllegalArgumentException(msg);
+        }
 
-    public String getColCounters() {
-        return colCounters;
-    }
-
-    public void setColCounters(String colCounters) {
-        this.colCounters = colCounters;
-    }
-
-    public String getColTP() {
-        return colTP;
-    }
-
-    public void setColTP(String colTP) {
-        this.colTP = colTP;
-    }
-
-    public DBCollection getDbColCounters() {
-        return dbColCounters;
+        this.writeConcernRef = wc;
     }
 
-    public void setDbColCounters(DBCollection dbColCounters) {
-        this.dbColCounters = dbColCounters;
+    public WriteConcern getWriteConcernRef() {
+        return writeConcernRef;
     }
 
-    public DBCollection getDbColTP() {
-        return dbColTP;
+    /** 
+     * Sets a MongoDB {@link ReadPreference} on the Mongo connection. Read preferences set directly on the connection will be
+     * overridden by this setting.
+     * <p/>
+     * The {@link com.mongodb.ReadPreference#valueOf(String)} utility method is used to resolve the passed {@code readPreference}
+     * value. Some examples for the possible values are {@code nearest}, {@code primary} or {@code secondary} etc.
+     * 
+     * @param readPreference the name of the read preference to set
+     */
+    public void setReadPreference(String readPreference) {
+        this.readPreference = ReadPreference.valueOf(readPreference);
     }
 
-    public void setDbColTP(DBCollection dbColTP) {
-        this.dbColTP = dbColTP;
+    public ReadPreference getReadPreference() {
+        return readPreference;
     }
-
-    public DB getDb() {
-        return db;
+    
+    
+    /**
+     * Sets the operation this endpoint will execute against GridFS.
+     */
+    public void setOperation(String operation) {
+        this.operation = operation;
     }
 
-    public void setDb(DB db) {
-        this.db = db;
+    public String getOperation() {
+        return operation;
     }
 
     public GridFS getGridFs() {

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java b/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
index e76af23..5178220 100644
--- a/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
@@ -16,19 +16,22 @@
  */
 package org.apache.camel.component.gridfs;
 
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Reader;
+
+import com.mongodb.BasicDBObject;
+import com.mongodb.DBCursor;
 import com.mongodb.DBObject;
+import com.mongodb.gridfs.GridFSDBFile;
 import com.mongodb.gridfs.GridFSInputFile;
 import com.mongodb.util.JSON;
 import org.apache.camel.Exchange;
 import org.apache.camel.impl.DefaultProducer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-import java.io.File;
 
 public class GridFsProducer extends DefaultProducer {
-
-    private static final Logger LOG = LoggerFactory.getLogger(GridFsProducer.class);
     private GridFsEndpoint endpoint;
 
     public GridFsProducer(GridFsEndpoint endpoint) {
@@ -37,54 +40,102 @@ public class GridFsProducer extends DefaultProducer {
     }
 
     public void process(Exchange exchange) throws Exception {
-        // set DBObject for query
-        DBObject dbObjQuery = (DBObject) JSON.parse("{_id:'inventory'}");
-
-        // set DBObject for update
-        DBObject dbObjUpdate = (DBObject) JSON.parse("{$inc:{seq:1}}");
-
-        // get inventoryID
-        DBObject invID = endpoint.getDbColCounters().findAndModify(dbObjQuery, dbObjUpdate);
-
-        // get the in message body
-        String TPID = exchange.getIn().getBody().toString();
-
-        // TODO set generic
-        // specific: get trading partner name, load_type, do_legacy
-        DBObject dbObjTPQuery = (DBObject) JSON.parse("{'tpid':'" + TPID + "'}");
-        DBObject tpName = endpoint.getDbColTP().findOne(dbObjTPQuery);
-
-        // set the tpName and tpLoadType in the headers
-        exchange.getIn().setHeader("tpName", tpName.get("name").toString());
-        exchange.getIn().setHeader("tpLoadType", tpName.get("load_type").toString());
-        // most won't have do_legacy, so catch error and default to 'Y'
-        try {
-            exchange.getIn().setHeader("tpDoLegacy", tpName.get("do_legacy").toString());
-        } catch (Exception e) {
-            exchange.getIn().setHeader("tpDoLegacy", "Y");
+        String operation = endpoint.getOperation();
+        if (operation == null) {
+            operation = exchange.getIn().getHeader("gridfs.operation", String.class);
         }
+        if (operation == null || "create".equals(operation)) {
+            final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
+            Long chunkSize = exchange.getIn().getHeader("gridfs.chunksize", Long.class);
+
+            InputStream ins = exchange.getIn().getMandatoryBody(InputStream.class);
+            GridFSInputFile gfsFile = endpoint.getGridFs().createFile(ins, filename, true);
+            if (chunkSize != null && chunkSize > 0) {
+                gfsFile.setChunkSize(chunkSize);
+            }
+            final String ct = exchange.getIn().getHeader(Exchange.CONTENT_TYPE, String.class);
+            if (ct != null) {
+                gfsFile.setContentType(ct);
+            }
+            String metaData = exchange.getIn().getHeader("gridfs.metadata", String.class);
+            DBObject dbObject = (DBObject) JSON.parse(metaData);
+            gfsFile.setMetaData(dbObject);
+            gfsFile.save();
+            exchange.getIn().setHeader(Exchange.FILE_NAME_PRODUCED, gfsFile.getFilename());
+        } else if ("remove".equals(operation)) {
+            final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
+            endpoint.getGridFs().remove(filename);
+        } else if ("findOne".equals(operation)) {
+            final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
+            GridFSDBFile file = endpoint.getGridFs().findOne(filename);
+            if (file != null) {
+                exchange.getIn().setBody(file.getInputStream(), InputStream.class);
+            } else {
+                throw new FileNotFoundException("No GridFS file for " + filename);
+            }
+        } else if ("listAll".equals(operation)) {
+            final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
+            DBCursor cursor;
+            if (filename == null) {
+                cursor = endpoint.getGridFs().getFileList();
+            } else {
+                cursor = endpoint.getGridFs().getFileList(new BasicDBObject("filename", filename));
+            }
+            exchange.getIn().setBody(new DBCursorFilenameReader(cursor), Reader.class);
+        } else if ("count".equals(operation)) {
+            final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
+            DBCursor cursor;
+            if (filename == null) {
+                cursor = endpoint.getGridFs().getFileList();
+            } else {
+                cursor = endpoint.getGridFs().getFileList(new BasicDBObject("filename", filename));
+            }
+            exchange.getIn().setBody(cursor.count(), Integer.class);
+        } 
+        
+    }
 
-        // save the TPID for move
-        exchange.getIn().setHeader("TPID", TPID);
-
-        String sInv = invID.get("seq").toString();
-        // strip off decimal
-        sInv = sInv.substring(0, sInv.lastIndexOf("."));
-        exchange.getIn().setHeader("mInv", sInv);
-
-        File file = new File(exchange.getIn().getHeader("gridFsInputFile").toString());
-        GridFSInputFile gfsFile = endpoint.getGridFs().createFile(file);
-
-        // set filename
-        gfsFile.setFilename(exchange.getIn().getHeader("gridFsFileName").toString());
-
-        // add metadata
-        String metaData = "{'inventoryID':" + sInv + ", 'TPID':'" + TPID + "', 'doc_type':'original', 'status':'initial_save'}";
-        DBObject dbObject = (DBObject) JSON.parse(metaData);
-        gfsFile.setMetaData(dbObject);
+    
+    /**
+     * Streams the "filename" attribute of every document in a DBCursor as
+     * newline-separated text. Declared static so it does not retain a hidden
+     * reference to the enclosing producer; closing the reader closes the cursor.
+     */
+    private static class DBCursorFilenameReader extends Reader {
+        DBCursor cursor;
+        StringBuilder current;  // buffered, not-yet-consumed filename text
+        int pos;                // read position within 'current'
+        
+        DBCursorFilenameReader(DBCursor c) {
+            cursor = c;
+            current = new StringBuilder(4096);
+            pos = 0;
+            fill();
+        }
+        /**
+         * Drops already-consumed characters and tops the buffer back up to
+         * roughly 4000 characters from the cursor.
+         */
+        void fill() {
+            if (pos > 0) {
+                current.delete(0, pos);
+                pos = 0;
+            }
+            while (cursor.hasNext() && current.length() < 4000) {
+                DBObject o = cursor.next();
+                current.append(o.get("filename")).append("\n");
+            }
+        }
+        @Override
+        public int read(char[] cbuf, int off, int len) throws IOException {
+            if (pos == current.length()) {
+                fill();
+            }
+            if (pos == current.length()) {
+                // fill() produced nothing: cursor is exhausted, signal EOF.
+                return -1;
+            }
+            if (len > (current.length() - pos)) {
+                len = current.length() - pos;
+            }
+            current.getChars(pos, pos + len, cbuf, off);
+            pos += len;
+            return len;
+        }
+
+        @Override
+        public void close() throws IOException {
+            cursor.close();
+        }
+    }
-
 }

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/test/java/org/apache/camel/component/gridfs/AbstractMongoDbTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/camel/component/gridfs/AbstractMongoDbTest.java b/src/test/java/org/apache/camel/component/gridfs/AbstractMongoDbTest.java
new file mode 100644
index 0000000..b1c94b9
--- /dev/null
+++ b/src/test/java/org/apache/camel/component/gridfs/AbstractMongoDbTest.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.gridfs;
+
+
+import com.mongodb.MongoClient;
+import com.mongodb.gridfs.GridFS;
+
+import org.apache.camel.CamelContext;
+import org.apache.camel.component.properties.PropertiesComponent;
+import org.apache.camel.spring.SpringCamelContext;
+import org.apache.camel.test.junit4.CamelTestSupport;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+public abstract class AbstractMongoDbTest extends CamelTestSupport {
+
+    protected static MongoClient mongo;
+    protected static GridFS gridfs;
+
+    protected ApplicationContext applicationContext;
+
+    @SuppressWarnings("deprecation")
+    @Override
+    public void doPostSetup() {
+        mongo = applicationContext.getBean(MongoClient.class);
+        gridfs = new GridFS(mongo.getDB("test"));
+    }
+
+
+    @Override
+    protected CamelContext createCamelContext() throws Exception {
+        applicationContext = new AnnotationConfigApplicationContext(EmbedMongoConfiguration.class);
+        CamelContext ctx = new SpringCamelContext(applicationContext);
+        PropertiesComponent pc = new PropertiesComponent("classpath:mongodb.test.properties");
+        ctx.addComponent("properties", pc);
+        return ctx;
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/test/java/org/apache/camel/component/gridfs/EmbedMongoConfiguration.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/camel/component/gridfs/EmbedMongoConfiguration.java b/src/test/java/org/apache/camel/component/gridfs/EmbedMongoConfiguration.java
new file mode 100644
index 0000000..d755a45
--- /dev/null
+++ b/src/test/java/org/apache/camel/component/gridfs/EmbedMongoConfiguration.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.gridfs;
+
+import java.io.IOException;
+import java.net.UnknownHostException;
+
+import com.mongodb.MongoClient;
+import de.flapdoodle.embed.mongo.MongodExecutable;
+import de.flapdoodle.embed.mongo.MongodStarter;
+import de.flapdoodle.embed.mongo.config.IMongodConfig;
+import de.flapdoodle.embed.mongo.config.MongodConfigBuilder;
+import de.flapdoodle.embed.mongo.config.Net;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import static de.flapdoodle.embed.mongo.distribution.Version.Main.PRODUCTION;
+import static de.flapdoodle.embed.process.runtime.Network.localhostIsIPv6;
+import static org.springframework.util.SocketUtils.findAvailableTcpPort;
+
+@Configuration
+public class EmbedMongoConfiguration {
+
+    private static final int PORT = findAvailableTcpPort();
+
+    static {
+        try {
+            IMongodConfig mongodConfig = new MongodConfigBuilder()
+                    .version(PRODUCTION)
+                    .net(new Net(PORT, localhostIsIPv6()))
+                    .build();
+            MongodExecutable mongodExecutable = MongodStarter.getDefaultInstance().prepare(mongodConfig);
+            mongodExecutable.start();
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Bean
+    public MongoClient myDb() throws UnknownHostException {
+        return new MongoClient("0.0.0.0", PORT);
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerOperationsTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerOperationsTest.java b/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerOperationsTest.java
new file mode 100644
index 0000000..8aaa0de
--- /dev/null
+++ b/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerOperationsTest.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.gridfs;
+
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.builder.RouteBuilder;
+import org.junit.Test;
+
+public class GridFsConsumerOperationsTest extends AbstractMongoDbTest {
+    
+    @Override
+    protected RouteBuilder createRouteBuilder() throws Exception {
+        return new RouteBuilder() {
+            public void configure() {
+                from("direct:create").to("gridfs:myDb?database={{mongodb.testDb}}&operation=create");
+                from("direct:remove").to("gridfs:myDb?database={{mongodb.testDb}}&operation=remove");
+                from("direct:findOne").to("gridfs:myDb?database={{mongodb.testDb}}&operation=findOne");
+                from("direct:listAll").to("gridfs:myDb?database={{mongodb.testDb}}&operation=listAll");
+                from("direct:count").to("gridfs:myDb?database={{mongodb.testDb}}&operation=count");
+                from("direct:headerOp").to("gridfs:myDb?database={{mongodb.testDb}}");
+            }
+        };
+    }
+    
+    @Test
+    public void testOperations() throws Exception {
+        Map<String, Object> headers = new HashMap<String, Object>();
+        String fn = "filename.for.db.txt";
+        assertEquals(0, gridfs.find(fn).size());
+        
+        headers.put(Exchange.FILE_NAME, fn);
+        String data = "This is some stuff to go into the db";
+        template.requestBodyAndHeaders("direct:create", data, headers);
+        assertEquals(1, gridfs.find(fn).size());
+        assertEquals(1, template.requestBodyAndHeaders("direct:count", null, headers));
+        InputStream ins = template.requestBodyAndHeaders("direct:findOne", null, headers, InputStream.class);
+        assertNotNull(ins);
+        byte b[] = new byte[2048];
+        int i = ins.read(b);
+        assertEquals(data, new String(b, 0, i, "utf-8"));
+        
+        headers.put(Exchange.FILE_NAME, "2-" + fn);
+        
+        template.requestBodyAndHeaders("direct:create", data + "data2", headers);
+        assertEquals(1, template.requestBodyAndHeaders("direct:count", null, headers));
+        assertEquals(2, template.requestBody("direct:count", null, Integer.class).intValue());
+        
+        String s = template.requestBody("direct:listAll", null, String.class);
+        assertTrue(s.contains("2-" + fn));
+        template.requestBodyAndHeaders("direct:remove", null, headers);
+        assertEquals(1, template.requestBody("direct:count", null, Integer.class).intValue());
+        s = template.requestBody("direct:listAll", null, String.class);
+        assertFalse(s.contains("2-" + fn));
+    }
+}
+

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/src/test/resources/log4j.properties b/src/test/resources/log4j.properties
new file mode 100644
index 0000000..cb64298
--- /dev/null
+++ b/src/test/resources/log4j.properties
@@ -0,0 +1,37 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+log4j.rootLogger=INFO, file
+# change the logging level of this category to increase verbosity of the MongoDB component
+log4j.category.org.apache.camel.component.mongodb=INFO, file
+log4j.additivity.org.apache.camel.component.mongodb=false
+
+# uncomment the following line to turn on Camel debugging
+#log4j.logger.org.apache.camel=DEBUG
+
+# CONSOLE appender not used by default
+log4j.appender.out=org.apache.log4j.ConsoleAppender
+log4j.appender.out.layout=org.apache.log4j.PatternLayout
+log4j.appender.out.layout.ConversionPattern=[%30.30t] %-30.30c{1} %-5p %m%n
+#log4j.appender.out.layout.ConversionPattern=%d [%-15.15t] %-5p %-30.30c{1} - %m%n
+
+
+# File appender
+log4j.appender.file=org.apache.log4j.FileAppender
+log4j.appender.file.layout=org.apache.log4j.PatternLayout
+log4j.appender.file.layout.ConversionPattern=%d [%-15.15t] %-5p %-30.30c{1} - %m%n
+log4j.appender.file.file=target/camel-mongodb-test.log

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/test/resources/mongodb.test.properties
----------------------------------------------------------------------
diff --git a/src/test/resources/mongodb.test.properties b/src/test/resources/mongodb.test.properties
new file mode 100644
index 0000000..20c529d
--- /dev/null
+++ b/src/test/resources/mongodb.test.properties
@@ -0,0 +1,21 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+mongodb.connectionURI=mongodb://localhost:27017
+mongodb.testDb=test
+mongodb.testCollection=camelTest
+mongodb.cappedTestCollection=camelTestCapped
\ No newline at end of file


[5/9] camel git commit: [CAMEL-9659] Add different strategies for handling the detection of new files

Posted by dk...@apache.org.
[CAMEL-9659] Add different strategies for handling the detection of new files


Project: http://git-wip-us.apache.org/repos/asf/camel/repo
Commit: http://git-wip-us.apache.org/repos/asf/camel/commit/45204104
Tree: http://git-wip-us.apache.org/repos/asf/camel/tree/45204104
Diff: http://git-wip-us.apache.org/repos/asf/camel/diff/45204104

Branch: refs/heads/master
Commit: 452041047e770c24e8802f2cac7cc76080554303
Parents: ce54b04
Author: Daniel Kulp <dk...@apache.org>
Authored: Tue Mar 1 11:48:43 2016 -0500
Committer: Daniel Kulp <dk...@apache.org>
Committed: Tue Mar 1 14:23:07 2016 -0500

----------------------------------------------------------------------
 .../camel/component/gridfs/GridFsConsumer.java  | 98 ++++++++++++++++----
 .../camel/component/gridfs/GridFsEndpoint.java  | 48 +++++++++-
 .../component/gridfs/GridFsConsumerTest.java    | 36 ++++++-
 3 files changed, 157 insertions(+), 25 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/camel/blob/45204104/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java b/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
index 240dd47..35d77ee 100644
--- a/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
@@ -23,13 +23,17 @@ import java.io.InputStream;
 import java.util.concurrent.ExecutorService;
 
 import com.mongodb.BasicDBObject;
+import com.mongodb.BasicDBObjectBuilder;
+import com.mongodb.DBCollection;
 import com.mongodb.DBCursor;
 import com.mongodb.DBObject;
+import com.mongodb.MongoException;
 import com.mongodb.gridfs.GridFSDBFile;
 import com.mongodb.util.JSON;
 
 import org.apache.camel.Exchange;
 import org.apache.camel.Processor;
+import org.apache.camel.component.gridfs.GridFsEndpoint.QueryStrategy;
 import org.apache.camel.impl.DefaultConsumer;
 
 /**
@@ -48,8 +52,6 @@ public class GridFsConsumer extends DefaultConsumer implements Runnable {
         this.endpoint = endpoint;
     }
 
-    
-
     @Override
     protected void doStop() throws Exception {
         super.doStop();
@@ -69,7 +71,38 @@ public class GridFsConsumer extends DefaultConsumer implements Runnable {
     @Override
     public void run() {
         DBCursor c = null;
-        java.util.Date fromDate = new java.util.Date();
+        java.util.Date fromDate = null;
+        
+        QueryStrategy s = endpoint.getQueryStrategy();
+        boolean usesTimestamp = (s != QueryStrategy.FileAttribute);
+        boolean persistsTimestamp = (s == QueryStrategy.PersistentTimestamp || s == QueryStrategy.PersistentTimestampAndFileAttribute);
+        boolean usesAttribute = (s == QueryStrategy.FileAttribute 
+            || s == QueryStrategy.TimeStampAndFileAttribute 
+            || s == QueryStrategy.PersistentTimestampAndFileAttribute);
+        
+        DBCollection ptsCollection = null;
+        DBObject persistentTimestamp = null;
+        if (persistsTimestamp) {
+            ptsCollection = endpoint.getDB().getCollection(endpoint.getPersistentTSCollection());
+         // ensure standard indexes as long as collections are small
+            try {
+                if (ptsCollection.count() < 1000) {
+                    ptsCollection.createIndex(new BasicDBObject("id", 1));
+                }
+            } catch (MongoException e) {
+                //TODO: Logging
+            }
+            persistentTimestamp = ptsCollection.findOne(new BasicDBObject("id", endpoint.getPersistentTSObject()));
+            if (persistentTimestamp == null) {
+                persistentTimestamp = new BasicDBObject("id", endpoint.getPersistentTSObject());
+                fromDate = new java.util.Date();
+                persistentTimestamp.put("timestamp", fromDate);
+                ptsCollection.save(persistentTimestamp);
+            }
+            fromDate = (java.util.Date)persistentTimestamp.get("timestamp");
+        } else if (usesTimestamp) {
+            fromDate = new java.util.Date();
+        }
         try {
             Thread.sleep(endpoint.getInitialDelay());
             while (isStarted()) {                
@@ -84,27 +117,54 @@ public class GridFsConsumer extends DefaultConsumer implements Runnable {
                     } else {
                         query = (DBObject) JSON.parse(queryString);
                     }
-                    
-                    query.put("uploadDate", new BasicDBObject("$gte", fromDate));
+                    if (usesTimestamp) {
+                        query.put("uploadDate", new BasicDBObject("$gt", fromDate));
+                    }
+                    if (usesAttribute) {
+                        query.put(endpoint.getFileAttributeName(), null);
+                    }
                     c = endpoint.getFilesCollection().find(query);
-                    fromDate = new java.util.Date();
                 }
+                boolean dateModified = false;
                 while (c.hasNext() && isStarted()) {
                     GridFSDBFile file = (GridFSDBFile)c.next();
-                    file = endpoint.getGridFs().findOne(new BasicDBObject("_id", file.getId()));
-                    
-                    Exchange exchange = endpoint.createExchange();
-                    exchange.getIn().setHeader(GridFsEndpoint.GRIDFS_METADATA, JSON.serialize(file.getMetaData()));
-                    exchange.getIn().setHeader(Exchange.FILE_CONTENT_TYPE, file.getContentType());
-                    exchange.getIn().setHeader(Exchange.FILE_LENGTH, file.getLength());
-                    exchange.getIn().setHeader(Exchange.FILE_LAST_MODIFIED, file.getUploadDate());
-                    exchange.getIn().setBody(file.getInputStream(), InputStream.class);
-                    try {
-                        getProcessor().process(exchange);
-                    } catch (Exception e) {
-                        // TODO Auto-generated catch block
-                        e.printStackTrace();
+                    GridFSDBFile forig = file;
+                    if (usesAttribute) {
+                        file.put(endpoint.getFileAttributeName(), "processing");
+                        DBObject q = BasicDBObjectBuilder.start("_id", file.getId()).append("camel-processed", null).get();
+                        forig = (GridFSDBFile)endpoint.getFilesCollection().findAndModify(q, null, null, false, file, true, false);
                     }
+                    if (forig != null) {
+                        file = endpoint.getGridFs().findOne(new BasicDBObject("_id", file.getId()));
+                        
+                        Exchange exchange = endpoint.createExchange();
+                        exchange.getIn().setHeader(GridFsEndpoint.GRIDFS_METADATA, JSON.serialize(file.getMetaData()));
+                        exchange.getIn().setHeader(Exchange.FILE_CONTENT_TYPE, file.getContentType());
+                        exchange.getIn().setHeader(Exchange.FILE_LENGTH, file.getLength());
+                        exchange.getIn().setHeader(Exchange.FILE_LAST_MODIFIED, file.getUploadDate());
+                        exchange.getIn().setBody(file.getInputStream(), InputStream.class);
+                        try {
+                            getProcessor().process(exchange);
+                            //System.out.println("Processing " + file.getFilename());
+                            if (usesAttribute) {
+                                forig.put(endpoint.getFileAttributeName(), "done");
+                                endpoint.getFilesCollection().save(forig);
+                            }
+                            if (usesTimestamp) {
+                                if (file.getUploadDate().compareTo(fromDate) > 0) {
+                                    fromDate = file.getUploadDate();
+                                    dateModified = true;
+                                }
+                            }
+                        } catch (Exception e) {
+                            // TODO Auto-generated catch block
+                            e.printStackTrace();
+                        }
+                    }
+                }
+                if (persistsTimestamp && dateModified) {
+                    persistentTimestamp.put("timestamp", fromDate);
+                    ptsCollection.save(persistentTimestamp);
                 }
                 Thread.sleep(endpoint.getDelay());
             }

http://git-wip-us.apache.org/repos/asf/camel/blob/45204104/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java b/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
index 008e004..554c4cd 100644
--- a/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
@@ -36,6 +36,14 @@ import org.slf4j.LoggerFactory;
 @UriEndpoint(scheme = "gridfs", title = "MongoDBGridFS", syntax = "gridfs:connectionBean", 
             label = "database,nosql")
 public class GridFsEndpoint extends DefaultEndpoint {
+    
+    public enum QueryStrategy {
+        TimeStamp,
+        PersistentTimestamp,
+        FileAttribute,
+        TimeStampAndFileAttribute,
+        PersistentTimestampAndFileAttribute
+    };
     public static final String GRIDFS_OPERATION = "gridfs.operation";
     public static final String GRIDFS_METADATA = "gridfs.metadata";
     public static final String GRIDFS_CHUNKSIZE = "gridfs.chunksize";
@@ -64,7 +72,16 @@ public class GridFsEndpoint extends DefaultEndpoint {
     @UriParam
     private long delay = 500;
     
-    
+    @UriParam 
+    private QueryStrategy queryStrategy = QueryStrategy.TimeStamp;
+    @UriParam
+    private String persistentTSCollection = "camel-timestamps";
+    @UriParam
+    private String persistentTSObject = "camel-timestamp";
+    @UriParam
+    private String fileAttributeName = "camel-processed";
+
+
     private Mongo mongoConnection;
     private DB db;
     private GridFS gridFs;
@@ -154,6 +171,10 @@ public class GridFsEndpoint extends DefaultEndpoint {
         this.mongoConnection = mongoConnection;
     }
 
+    public DB getDB() {
+        return db;
+    }
+    
     public String getDatabase() {
         return database;
     }
@@ -186,6 +207,31 @@ public class GridFsEndpoint extends DefaultEndpoint {
         this.initialDelay = delay;
     }
     
+    public void setQueryStrategy(String s) {
+        queryStrategy = QueryStrategy.valueOf(s);
+    }
+    public QueryStrategy getQueryStrategy() {
+        return queryStrategy;
+    }
+    public void setPersistentTSCollection(String s) {
+        persistentTSCollection = s;
+    }
+    public String getPersistentTSCollection() {
+        return persistentTSCollection;
+    }
+    public void setPersistentTSObject(String s) {
+        persistentTSObject = s;
+    }
+    public String getPersistentTSObject() {
+        return persistentTSObject;
+    }
+    public void setFileAttributeName(String f) {
+        fileAttributeName = f;
+    }
+    public String getFileAttributeName() {
+        return fileAttributeName;
+    }   
+    
     /**
      * Set the {@link WriteConcern} for write operations on MongoDB using the standard ones.
      * Resolved from the fields of the WriteConcern class by calling the {@link WriteConcern#valueOf(String)} method.

http://git-wip-us.apache.org/repos/asf/camel/blob/45204104/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java b/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java
index a84260c..77b1c6e 100644
--- a/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java
+++ b/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java
@@ -22,6 +22,8 @@ package org.apache.camel.component.gridfs;
 import java.util.HashMap;
 import java.util.Map;
 
+import com.mongodb.gridfs.GridFS;
+
 import org.apache.camel.Exchange;
 import org.apache.camel.builder.RouteBuilder;
 import org.apache.camel.component.mock.MockEndpoint;
@@ -37,14 +39,36 @@ public class GridFsConsumerTest extends AbstractMongoDbTest {
         return new RouteBuilder() {
             public void configure() {
                 from("direct:create").to("gridfs:myDb?database={{mongodb.testDb}}&operation=create&bucket=" + getBucket());
+                from("direct:create-a").to("gridfs:myDb?database={{mongodb.testDb}}&operation=create&bucket=" + getBucket() + "-a");
+                from("direct:create-pts").to("gridfs:myDb?database={{mongodb.testDb}}&operation=create&bucket=" + getBucket() + "-pts");
+                
                 from("gridfs:myDb?database={{mongodb.testDb}}&bucket=" + getBucket()).convertBodyTo(String.class).to("mock:test");
+                from("gridfs:myDb?database={{mongodb.testDb}}&bucket=" + getBucket() + "-a&queryStrategy=FileAttribute")
+                    .convertBodyTo(String.class).to("mock:test");
+                from("gridfs:myDb?database={{mongodb.testDb}}&bucket=" + getBucket() + "-pts&queryStrategy=PersistentTimestamp")
+                    .convertBodyTo(String.class).to("mock:test");
             }
         };
     }
     
     
     @Test
-    public void test() throws Exception {
+    public void testTimestamp() throws Exception {
+        runTest("direct:create", gridfs);
+    }
+    @Test
+    @SuppressWarnings("deprecation")
+    public void testAttribute() throws Exception {
+        runTest("direct:create-a", new GridFS(mongo.getDB("test"), getBucket() + "-a"));
+    }
+    
+    @Test
+    @SuppressWarnings("deprecation")
+    public void testPersistentTS() throws Exception {
+        runTest("direct:create-pts", new GridFS(mongo.getDB("test"), getBucket() + "-pts"));
+    }
+    
+    public void runTest(String target, GridFS gridfs) throws Exception {
         MockEndpoint mock = getMockEndpoint("mock:test");
         String data = "This is some stuff to go into the db";
         mock.expectedMessageCount(1);
@@ -55,7 +79,7 @@ public class GridFsConsumerTest extends AbstractMongoDbTest {
         assertEquals(0, gridfs.find(fn).size());
         
         headers.put(Exchange.FILE_NAME, fn);
-        template.requestBodyAndHeaders("direct:create", data, headers);
+        template.requestBodyAndHeaders(target, data, headers);
         
         mock.assertIsSatisfied();
         mock.reset();
@@ -64,11 +88,13 @@ public class GridFsConsumerTest extends AbstractMongoDbTest {
         mock.expectedBodiesReceived(data, data, data);
         
         headers.put(Exchange.FILE_NAME, fn + "_1");
-        template.requestBodyAndHeaders("direct:create", data, headers);
+        template.requestBodyAndHeaders(target, data, headers);
         headers.put(Exchange.FILE_NAME, fn + "_2");
-        template.requestBodyAndHeaders("direct:create", data, headers);
+        template.requestBodyAndHeaders(target, data, headers);
         headers.put(Exchange.FILE_NAME, fn + "_3");
-        template.requestBodyAndHeaders("direct:create", data, headers);
+        template.requestBodyAndHeaders(target, data, headers);
+        mock.assertIsSatisfied();
+        Thread.sleep(1000);
         mock.assertIsSatisfied();
     }
 


[9/9] camel git commit: [CAMEL-9659] Fix checkstyle issues

Posted by dk...@apache.org.
[CAMEL-9659] Fix checkstyle issues


Project: http://git-wip-us.apache.org/repos/asf/camel/repo
Commit: http://git-wip-us.apache.org/repos/asf/camel/commit/e589faa8
Tree: http://git-wip-us.apache.org/repos/asf/camel/tree/e589faa8
Diff: http://git-wip-us.apache.org/repos/asf/camel/diff/e589faa8

Branch: refs/heads/master
Commit: e589faa820c62a03831d799d721d61dcf11cbdc7
Parents: b3eaf81
Author: Daniel Kulp <dk...@apache.org>
Authored: Tue Mar 1 15:01:37 2016 -0500
Committer: Daniel Kulp <dk...@apache.org>
Committed: Tue Mar 1 15:01:37 2016 -0500

----------------------------------------------------------------------
 .../camel/component/gridfs/GridFsComponent.java |  6 ++--
 .../camel/component/gridfs/GridFsConsumer.java  | 34 +++++++++-----------
 .../component/gridfs/GridFsConsumerTest.java    | 26 +++++++--------
 3 files changed, 32 insertions(+), 34 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/camel/blob/e589faa8/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
----------------------------------------------------------------------
diff --git a/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java b/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
index 26da915..62701a0 100644
--- a/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
+++ b/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
@@ -16,18 +16,20 @@
  */
 package org.apache.camel.component.gridfs;
 
+import java.util.Map;
+
 import com.mongodb.Mongo;
 import org.apache.camel.Endpoint;
 import org.apache.camel.impl.UriEndpointComponent;
 import org.apache.camel.util.CamelContextHelper;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.Map;
 
 public class GridFsComponent extends UriEndpointComponent {
 
-    private final static Logger LOG = LoggerFactory.getLogger(GridFsComponent.class);
+    private static final Logger LOG = LoggerFactory.getLogger(GridFsComponent.class);
 
     private volatile Mongo db;
 

http://git-wip-us.apache.org/repos/asf/camel/blob/e589faa8/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
----------------------------------------------------------------------
diff --git a/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java b/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
index 35d77ee..bfce711 100644
--- a/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
+++ b/components/camel-gridfs/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
@@ -1,20 +1,18 @@
 /**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ *      http://www.apache.org/licenses/LICENSE-2.0
  *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 
 package org.apache.camel.component.gridfs;
@@ -74,11 +72,11 @@ public class GridFsConsumer extends DefaultConsumer implements Runnable {
         java.util.Date fromDate = null;
         
         QueryStrategy s = endpoint.getQueryStrategy();
-        boolean usesTimestamp = (s != QueryStrategy.FileAttribute);
-        boolean persistsTimestamp = (s == QueryStrategy.PersistentTimestamp || s == QueryStrategy.PersistentTimestampAndFileAttribute);
-        boolean usesAttribute = (s == QueryStrategy.FileAttribute 
+        boolean usesTimestamp = s != QueryStrategy.FileAttribute;
+        boolean persistsTimestamp = s == QueryStrategy.PersistentTimestamp || s == QueryStrategy.PersistentTimestampAndFileAttribute;
+        boolean usesAttribute = s == QueryStrategy.FileAttribute
             || s == QueryStrategy.TimeStampAndFileAttribute 
-            || s == QueryStrategy.PersistentTimestampAndFileAttribute);
+            || s == QueryStrategy.PersistentTimestampAndFileAttribute;
         
         DBCollection ptsCollection = null;
         DBObject persistentTimestamp = null;

http://git-wip-us.apache.org/repos/asf/camel/blob/e589faa8/components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java
----------------------------------------------------------------------
diff --git a/components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java b/components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java
index 77b1c6e..64aa1d9 100644
--- a/components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java
+++ b/components/camel-gridfs/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerTest.java
@@ -1,20 +1,18 @@
 /**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ *      http://www.apache.org/licenses/LICENSE-2.0
  *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 
 package org.apache.camel.component.gridfs;


[8/9] camel git commit: [CAMEL-9659] Add gridfs component to build

Posted by dk...@apache.org.
[CAMEL-9659] Add gridfs component to build


Project: http://git-wip-us.apache.org/repos/asf/camel/repo
Commit: http://git-wip-us.apache.org/repos/asf/camel/commit/b3eaf817
Tree: http://git-wip-us.apache.org/repos/asf/camel/tree/b3eaf817
Diff: http://git-wip-us.apache.org/repos/asf/camel/diff/b3eaf817

Branch: refs/heads/master
Commit: b3eaf817dfaf2499d58ecea473752a57a6764356
Parents: f1e21c5
Author: Daniel Kulp <dk...@apache.org>
Authored: Tue Mar 1 14:41:08 2016 -0500
Committer: Daniel Kulp <dk...@apache.org>
Committed: Tue Mar 1 14:41:08 2016 -0500

----------------------------------------------------------------------
 components/camel-gridfs/pom.xml | 2 +-
 components/pom.xml              | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/camel/blob/b3eaf817/components/camel-gridfs/pom.xml
----------------------------------------------------------------------
diff --git a/components/camel-gridfs/pom.xml b/components/camel-gridfs/pom.xml
index ade40e7..ec8e6bd 100644
--- a/components/camel-gridfs/pom.xml
+++ b/components/camel-gridfs/pom.xml
@@ -24,7 +24,7 @@
     <parent>
         <groupId>org.apache.camel</groupId>
         <artifactId>components</artifactId>
-        <version>2.16.2</version>
+        <version>2.17-SNAPSHOT</version>
     </parent>
 
     <artifactId>camel-gridfs</artifactId>

http://git-wip-us.apache.org/repos/asf/camel/blob/b3eaf817/components/pom.xml
----------------------------------------------------------------------
diff --git a/components/pom.xml b/components/pom.xml
index 7da5a56..e14d134 100644
--- a/components/pom.xml
+++ b/components/pom.xml
@@ -114,6 +114,7 @@
     <module>camel-google-mail</module>
     <module>camel-gora</module>
     <module>camel-grape</module>
+    <module>camel-gridfs</module>
     <module>camel-gson</module>
     <module>camel-guava-eventbus</module>
     <module>camel-guice</module>


[3/9] camel git commit: [CAMEL-9659] Add more headers to resulting exchange

Posted by dk...@apache.org.
[CAMEL-9659] Add more headers to resulting exchange


Project: http://git-wip-us.apache.org/repos/asf/camel/repo
Commit: http://git-wip-us.apache.org/repos/asf/camel/commit/f80654f4
Tree: http://git-wip-us.apache.org/repos/asf/camel/tree/f80654f4
Diff: http://git-wip-us.apache.org/repos/asf/camel/diff/f80654f4

Branch: refs/heads/master
Commit: f80654f493481535ca297e240ee03468ab0cd2f7
Parents: bfaec78
Author: Daniel Kulp <dk...@apache.org>
Authored: Mon Feb 29 13:14:31 2016 -0500
Committer: Daniel Kulp <dk...@apache.org>
Committed: Tue Mar 1 14:23:00 2016 -0500

----------------------------------------------------------------------
 .../camel/component/gridfs/GridFsConsumer.java      |  2 +-
 .../camel/component/gridfs/GridFsProducer.java      | 16 ++++++++++++----
 2 files changed, 13 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/camel/blob/f80654f4/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java b/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
index dce195a..4f2ce66 100644
--- a/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
@@ -26,7 +26,7 @@ import org.apache.camel.impl.DefaultConsumer;
  * 
  */
 public class GridFsConsumer extends DefaultConsumer {
-    GridFsEndpoint ep;
+    final GridFsEndpoint ep;
     
     /**
      * @param endpoint

http://git-wip-us.apache.org/repos/asf/camel/blob/f80654f4/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java b/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
index 5178220..7954644 100644
--- a/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
@@ -32,6 +32,10 @@ import org.apache.camel.impl.DefaultProducer;
 
 
 public class GridFsProducer extends DefaultProducer {
+    public static final String GRIDFS_OPERATION = "gridfs.operation";
+    public static final String GRIDFS_METADATA = "gridfs.metadata";
+    public static final String GRIDFS_CHUNKSIZE = "gridfs.chunksize";
+    
     private GridFsEndpoint endpoint;
 
     public GridFsProducer(GridFsEndpoint endpoint) {
@@ -42,11 +46,11 @@ public class GridFsProducer extends DefaultProducer {
     public void process(Exchange exchange) throws Exception {
         String operation = endpoint.getOperation();
         if (operation == null) {
-            operation = exchange.getIn().getHeader("gridfs.operation", String.class);
+            operation = exchange.getIn().getHeader(GRIDFS_OPERATION, String.class);
         }
         if (operation == null || "create".equals(operation)) {
             final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
-            Long chunkSize = exchange.getIn().getHeader("gridfs.chunksize", Long.class);
+            Long chunkSize = exchange.getIn().getHeader(GRIDFS_CHUNKSIZE, Long.class);
 
             InputStream ins = exchange.getIn().getMandatoryBody(InputStream.class);
             GridFSInputFile gfsFile = endpoint.getGridFs().createFile(ins, filename, true);
@@ -57,7 +61,7 @@ public class GridFsProducer extends DefaultProducer {
             if (ct != null) {
                 gfsFile.setContentType(ct);
             }
-            String metaData = exchange.getIn().getHeader("gridfs.metadata", String.class);
+            String metaData = exchange.getIn().getHeader(GRIDFS_METADATA, String.class);
             DBObject dbObject = (DBObject) JSON.parse(metaData);
             gfsFile.setMetaData(dbObject);
             gfsFile.save();
@@ -69,7 +73,11 @@ public class GridFsProducer extends DefaultProducer {
             final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
             GridFSDBFile file = endpoint.getGridFs().findOne(filename);
             if (file != null) {
-                exchange.getIn().setBody(file.getInputStream(), InputStream.class);
+                exchange.getIn().setHeader(GRIDFS_METADATA, JSON.serialize(file.getMetaData()));
+                exchange.getIn().setHeader(Exchange.FILE_CONTENT_TYPE, file.getContentType());
+                exchange.getIn().setHeader(Exchange.FILE_LENGTH, file.getLength());
+                exchange.getIn().setHeader(Exchange.FILE_LAST_MODIFIED, file.getUploadDate());
+                exchange.getIn().setBody(file.getInputStream(), InputStream.class);                
             } else {
                 throw new FileNotFoundException("No GridFS file for " + filename);
             }