camel-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From dk...@apache.org
Subject [2/9] camel git commit: [CAMEL-9659] GridFs producer working with junit test
Date Tue, 01 Mar 2016 20:01:59 GMT
[CAMEL-9659] GridFs producer working with junit test


Project: http://git-wip-us.apache.org/repos/asf/camel/repo
Commit: http://git-wip-us.apache.org/repos/asf/camel/commit/bfaec784
Tree: http://git-wip-us.apache.org/repos/asf/camel/tree/bfaec784
Diff: http://git-wip-us.apache.org/repos/asf/camel/diff/bfaec784

Branch: refs/heads/master
Commit: bfaec78409732532b68aff932fcbe0441fbd6559
Parents: 4c78e44
Author: Daniel Kulp <dkulp@apache.org>
Authored: Fri Feb 26 15:14:02 2016 -0500
Committer: Daniel Kulp <dkulp@apache.org>
Committed: Tue Mar 1 14:22:48 2016 -0500

----------------------------------------------------------------------
 pom.xml                                         |  15 +-
 .../camel/component/gridfs/GridFsComponent.java |  14 +-
 .../camel/component/gridfs/GridFsConsumer.java  |  40 ++++
 .../camel/component/gridfs/GridFsEndpoint.java  | 185 ++++++++++++++-----
 .../camel/component/gridfs/GridFsProducer.java  | 153 ++++++++++-----
 .../component/gridfs/AbstractMongoDbTest.java   |  53 ++++++
 .../gridfs/EmbedMongoConfiguration.java         |  58 ++++++
 .../gridfs/GridFsConsumerOperationsTest.java    |  74 ++++++++
 src/test/resources/log4j.properties             |  37 ++++
 src/test/resources/mongodb.test.properties      |  21 +++
 10 files changed, 542 insertions(+), 108 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 5b1e1a1..ade40e7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -24,7 +24,7 @@
     <parent>
         <groupId>org.apache.camel</groupId>
         <artifactId>components</artifactId>
-        <version>2.13.2</version>
+        <version>2.16.2</version>
     </parent>
 
     <artifactId>camel-gridfs</artifactId>
@@ -52,9 +52,18 @@
         <dependency>
             <groupId>org.mongodb</groupId>
             <artifactId>mongo-java-driver</artifactId>
-            <version>${mongo-java-driver-version}</version>
+            <version>3.2.2</version>
+        </dependency>
+        <dependency>
+            <groupId>de.flapdoodle.embed</groupId>
+            <artifactId>de.flapdoodle.embed.mongo</artifactId>
+            <scope>test</scope>
+        </dependency>
+	<dependency>
+          <groupId>org.apache.camel</groupId>
+          <artifactId>camel-test-spring</artifactId>
+          <scope>test</scope>
         </dependency>
-
     </dependencies>
 
 </project>

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java b/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
index 87d5394..26da915 100644
--- a/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsComponent.java
@@ -18,29 +18,35 @@ package org.apache.camel.component.gridfs;
 
 import com.mongodb.Mongo;
 import org.apache.camel.Endpoint;
-import org.apache.camel.impl.DefaultComponent;
+import org.apache.camel.impl.UriEndpointComponent;
 import org.apache.camel.util.CamelContextHelper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Map;
 
-public class GridFsComponent extends DefaultComponent {
+public class GridFsComponent extends UriEndpointComponent {
 
     private final static Logger LOG = LoggerFactory.getLogger(GridFsComponent.class);
 
     private volatile Mongo db;
 
+    public GridFsComponent() {
+        super(GridFsEndpoint.class);
+    }
+    
     protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
         if (db == null) {
             db = CamelContextHelper.mandatoryLookup(getCamelContext(), remaining, Mongo.class);
             LOG.debug("Resolved the connection with the name {} as {}", remaining, db);
         }
 
-        Endpoint endpoint = new GridFsEndpoint(uri, this);
+        GridFsEndpoint endpoint = new GridFsEndpoint(uri, this);
         parameters.put("mongoConnection", db);
+        endpoint.setConnectionBean(remaining);
+        endpoint.setMongoConnection(db);
         setProperties(endpoint, parameters);
-
+        
         return endpoint;
     }
 

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java b/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
new file mode 100644
index 0000000..dce195a
--- /dev/null
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsConsumer.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.camel.component.gridfs;
+
+import org.apache.camel.Processor;
+import org.apache.camel.impl.DefaultConsumer;
+
+/**
+ * 
+ */
+public class GridFsConsumer extends DefaultConsumer {
+    GridFsEndpoint ep;
+    
+    /**
+     * @param endpoint
+     * @param processor
+     */
+    public GridFsConsumer(GridFsEndpoint endpoint, Processor processor) {
+        super(endpoint, processor);
+        ep = endpoint;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java b/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
index d630160..cef109a 100644
--- a/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsEndpoint.java
@@ -17,70 +17,125 @@
 package org.apache.camel.component.gridfs;
 
 import com.mongodb.DB;
-import com.mongodb.DBCollection;
 import com.mongodb.Mongo;
+import com.mongodb.ReadPreference;
+import com.mongodb.WriteConcern;
 import com.mongodb.gridfs.GridFS;
 import org.apache.camel.Consumer;
 import org.apache.camel.Processor;
 import org.apache.camel.Producer;
 import org.apache.camel.impl.DefaultEndpoint;
+import org.apache.camel.spi.Metadata;
+import org.apache.camel.spi.UriEndpoint;
+import org.apache.camel.spi.UriParam;
+import org.apache.camel.spi.UriPath;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+@UriEndpoint(scheme = "gridfs", title = "MongoDBGridFS", syntax = "gridfs:connectionBean", label = "database,nosql")
 public class GridFsEndpoint extends DefaultEndpoint {
 
     private static final Logger LOG = LoggerFactory.getLogger(GridFsEndpoint.class);
 
-    private Mongo mongoConnection;
+    @UriPath @Metadata(required = "true")
+    private String connectionBean;
+    @UriParam
     private String database;
-    private String colCounters;
-    private String colTP;
-    private DBCollection dbColCounters;
-    private DBCollection dbColTP;
+    @UriParam
+    private String bucket;
+    @UriParam(enums = "ACKNOWLEDGED,W1,W2,W3,UNACKNOWLEDGED,JOURNALED,MAJORITY,SAFE")
+    private WriteConcern writeConcern;
+    @UriParam
+    private WriteConcern writeConcernRef;
+    @UriParam
+    private ReadPreference readPreference;
+    @UriParam
+    private String operation;
+
+    
+    private Mongo mongoConnection;
     private DB db;
     private GridFS gridFs;
 
-    public GridFsEndpoint() { }
-
     public GridFsEndpoint(String uri, GridFsComponent component) {
         super(uri, component);
     }
 
-    public GridFsEndpoint(String endpointUri) {
-        super(endpointUri);
-    }
-
+    @Override
     public Producer createProducer() throws Exception {
         initializeConnection();
         return new GridFsProducer(this);
     }
 
-    public Consumer createConsumer(Processor processor) {
-        return null;
+    @Override
+    public Consumer createConsumer(Processor processor) throws Exception {
+        initializeConnection();
+        return new GridFsConsumer(this, processor);
     }
 
     public boolean isSingleton() {
         return true;
     }
 
+    @SuppressWarnings("deprecation")
     public void initializeConnection() throws Exception {
         LOG.info("Initialize GridFS endpoint: {}", this.toString());
-        if (database == null || colCounters == null || colTP == null) {
-            throw new IllegalStateException("Missing required endpoint configuration: database and/or colCounters and/or colTP");
+        if (database == null) {
+            throw new IllegalStateException("Missing required endpoint configuration: database");
         }
         db = mongoConnection.getDB(database);
         if (db == null) {
             throw new IllegalStateException("Could not initialize GridFsComponent. Database " + database + " does not exist.");
         }
-        dbColCounters = db.getCollection(colCounters);
-        dbColTP = db.getCollection(colTP);
-        gridFs = new GridFS(db);
+        gridFs = new GridFS(db, bucket == null ? GridFS.DEFAULT_BUCKET : bucket);
     }
 
+    
+    @Override
+    protected void doStart() throws Exception {
+        if (writeConcern != null && writeConcernRef != null) {
+            String msg = "Cannot set both writeConcern and writeConcernRef at the same time. Respective values: " + writeConcern
+                    + ", " + writeConcernRef + ". Aborting initialization.";
+            throw new IllegalArgumentException(msg);
+        }
+
+        setWriteReadOptionsOnConnection();
+        super.doStart();
+    }
+    private void setWriteReadOptionsOnConnection() {
+        // Set the WriteConcern
+        if (writeConcern != null) {
+            mongoConnection.setWriteConcern(writeConcern);
+        } else if (writeConcernRef != null) {
+            mongoConnection.setWriteConcern(writeConcernRef);
+        }
+
+        // Set the ReadPreference
+        if (readPreference != null) {
+            mongoConnection.setReadPreference(readPreference);
+        }
+    }
+    
+    
+    
+    
+    // ======= Getters and setters ===============================================
+
+
+    public String getConnectionBean() {
+        return connectionBean;
+    }
+    /**
+     * Name of {@link com.mongodb.Mongo} to use.
+     */
+    public void setConnectionBean(String connectionBean) {
+        this.connectionBean = connectionBean;
+    }
+    
     public Mongo getMongoConnection() {
         return mongoConnection;
     }
-
     public void setMongoConnection(Mongo mongoConnection) {
         this.mongoConnection = mongoConnection;
     }
@@ -88,49 +143,79 @@ public class GridFsEndpoint extends DefaultEndpoint {
     public String getDatabase() {
         return database;
     }
-
     public void setDatabase(String database) {
         this.database = database;
     }
+    public String getBucket() {
+        return bucket;
+    }
+    public void setBucket(String bucket) {
+        this.bucket = bucket;
+    }
+    
+    /**
+     * Set the {@link WriteConcern} for write operations on MongoDB using the standard ones.
+     * Resolved from the fields of the WriteConcern class by calling the {@link WriteConcern#valueOf(String)} method.
+     * 
+     * @param writeConcern the standard name of the WriteConcern
+     * @see <a href="http://api.mongodb.org/java/current/com/mongodb/WriteConcern.html#valueOf(java.lang.String)">possible options</a>
+     */
+    public void setWriteConcern(String writeConcern) {
+        this.writeConcern = WriteConcern.valueOf(writeConcern);
+    }
+
+    public WriteConcern getWriteConcern() {
+        return writeConcern;
+    }
+
+    /**
+     * Set the {@link WriteConcern} for write operations on MongoDB, passing in the bean ref to a custom WriteConcern which exists in the Registry.
+     * You can also use standard WriteConcerns by passing in their key. See the {@link #setWriteConcern(String) setWriteConcern} method.
+     * 
+     * @param writeConcernRef the name of the bean in the registry that represents the WriteConcern to use
+     */
+    public void setWriteConcernRef(String writeConcernRef) {
+        WriteConcern wc = this.getCamelContext().getRegistry().lookupByNameAndType(writeConcernRef, WriteConcern.class);
+        if (wc == null) {
+            String msg = "Camel MongoDB component could not find the WriteConcern in the Registry. Verify that the "
+                    + "provided bean name (" + writeConcernRef + ")  is correct. Aborting initialization.";
+            throw new IllegalArgumentException(msg);
+        }
 
-    public String getColCounters() {
-        return colCounters;
-    }
-
-    public void setColCounters(String colCounters) {
-        this.colCounters = colCounters;
-    }
-
-    public String getColTP() {
-        return colTP;
-    }
-
-    public void setColTP(String colTP) {
-        this.colTP = colTP;
-    }
-
-    public DBCollection getDbColCounters() {
-        return dbColCounters;
+        this.writeConcernRef = wc;
     }
 
-    public void setDbColCounters(DBCollection dbColCounters) {
-        this.dbColCounters = dbColCounters;
+    public WriteConcern getWriteConcernRef() {
+        return writeConcernRef;
     }
 
-    public DBCollection getDbColTP() {
-        return dbColTP;
+    /** 
+     * Sets a MongoDB {@link ReadPreference} on the Mongo connection. Read preferences set directly on the connection will be
+     * overridden by this setting.
+     * <p/>
+     * The {@link com.mongodb.ReadPreference#valueOf(String)} utility method is used to resolve the passed {@code readPreference}
+     * value. Some examples for the possible values are {@code nearest}, {@code primary} or {@code secondary} etc.
+     * 
+     * @param readPreference the name of the read preference to set
+     */
+    public void setReadPreference(String readPreference) {
+        this.readPreference = ReadPreference.valueOf(readPreference);
     }
 
-    public void setDbColTP(DBCollection dbColTP) {
-        this.dbColTP = dbColTP;
+    public ReadPreference getReadPreference() {
+        return readPreference;
     }
-
-    public DB getDb() {
-        return db;
+    
+    
+    /**
+     * Sets the operation this endpoint will execute against GridFS.
+     */
+    public void setOperation(String operation) {
+        this.operation = operation;
     }
 
-    public void setDb(DB db) {
-        this.db = db;
+    public String getOperation() {
+        return operation;
     }
 
     public GridFS getGridFs() {

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java b/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
index e76af23..5178220 100644
--- a/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
+++ b/src/main/java/org/apache/camel/component/gridfs/GridFsProducer.java
@@ -16,19 +16,22 @@
  */
 package org.apache.camel.component.gridfs;
 
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Reader;
+
+import com.mongodb.BasicDBObject;
+import com.mongodb.DBCursor;
 import com.mongodb.DBObject;
+import com.mongodb.gridfs.GridFSDBFile;
 import com.mongodb.gridfs.GridFSInputFile;
 import com.mongodb.util.JSON;
 import org.apache.camel.Exchange;
 import org.apache.camel.impl.DefaultProducer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-import java.io.File;
 
 public class GridFsProducer extends DefaultProducer {
-
-    private static final Logger LOG = LoggerFactory.getLogger(GridFsProducer.class);
     private GridFsEndpoint endpoint;
 
     public GridFsProducer(GridFsEndpoint endpoint) {
@@ -37,54 +40,102 @@ public class GridFsProducer extends DefaultProducer {
     }
 
     public void process(Exchange exchange) throws Exception {
-        // set DBObject for query
-        DBObject dbObjQuery = (DBObject) JSON.parse("{_id:'inventory'}");
-
-        // set DBObject for update
-        DBObject dbObjUpdate = (DBObject) JSON.parse("{$inc:{seq:1}}");
-
-        // get inventoryID
-        DBObject invID = endpoint.getDbColCounters().findAndModify(dbObjQuery, dbObjUpdate);
-
-        // get the in message body
-        String TPID = exchange.getIn().getBody().toString();
-
-        // TODO set generic
-        // specific: get trading partner name, load_type, do_legacy
-        DBObject dbObjTPQuery = (DBObject) JSON.parse("{'tpid':'" + TPID + "'}");
-        DBObject tpName = endpoint.getDbColTP().findOne(dbObjTPQuery);
-
-        // set the tpName and tpLoadType in the headers
-        exchange.getIn().setHeader("tpName", tpName.get("name").toString());
-        exchange.getIn().setHeader("tpLoadType", tpName.get("load_type").toString());
-        // most won't have do_legacy, so catch error and default to 'Y'
-        try {
-            exchange.getIn().setHeader("tpDoLegacy", tpName.get("do_legacy").toString());
-        } catch (Exception e) {
-            exchange.getIn().setHeader("tpDoLegacy", "Y");
+        String operation = endpoint.getOperation();
+        if (operation == null) {
+            operation = exchange.getIn().getHeader("gridfs.operation", String.class);
         }
+        if (operation == null || "create".equals(operation)) {
+            final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
+            Long chunkSize = exchange.getIn().getHeader("gridfs.chunksize", Long.class);
+
+            InputStream ins = exchange.getIn().getMandatoryBody(InputStream.class);
+            GridFSInputFile gfsFile = endpoint.getGridFs().createFile(ins, filename, true);
+            if (chunkSize != null && chunkSize > 0) {
+                gfsFile.setChunkSize(chunkSize);
+            }
+            final String ct = exchange.getIn().getHeader(Exchange.CONTENT_TYPE, String.class);
+            if (ct != null) {
+                gfsFile.setContentType(ct);
+            }
+            String metaData = exchange.getIn().getHeader("gridfs.metadata", String.class);
+            DBObject dbObject = (DBObject) JSON.parse(metaData);
+            gfsFile.setMetaData(dbObject);
+            gfsFile.save();
+            exchange.getIn().setHeader(Exchange.FILE_NAME_PRODUCED, gfsFile.getFilename());
+        } else if ("remove".equals(operation)) {
+            final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
+            endpoint.getGridFs().remove(filename);
+        } else if ("findOne".equals(operation)) {
+            final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
+            GridFSDBFile file = endpoint.getGridFs().findOne(filename);
+            if (file != null) {
+                exchange.getIn().setBody(file.getInputStream(), InputStream.class);
+            } else {
+                throw new FileNotFoundException("No GridFS file for " + filename);
+            }
+        } else if ("listAll".equals(operation)) {
+            final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
+            DBCursor cursor;
+            if (filename == null) {
+                cursor = endpoint.getGridFs().getFileList();
+            } else {
+                cursor = endpoint.getGridFs().getFileList(new BasicDBObject("filename", filename));
+            }
+            exchange.getIn().setBody(new DBCursorFilenameReader(cursor), Reader.class);
+        } else if ("count".equals(operation)) {
+            final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
+            DBCursor cursor;
+            if (filename == null) {
+                cursor = endpoint.getGridFs().getFileList();
+            } else {
+                cursor = endpoint.getGridFs().getFileList(new BasicDBObject("filename", filename));
+            }
+            exchange.getIn().setBody(cursor.count(), Integer.class);
+        } 
+        
+    }
 
-        // save the TPID for move
-        exchange.getIn().setHeader("TPID", TPID);
-
-        String sInv = invID.get("seq").toString();
-        // strip off decimal
-        sInv = sInv.substring(0, sInv.lastIndexOf("."));
-        exchange.getIn().setHeader("mInv", sInv);
-
-        File file = new File(exchange.getIn().getHeader("gridFsInputFile").toString());
-        GridFSInputFile gfsFile = endpoint.getGridFs().createFile(file);
-
-        // set filename
-        gfsFile.setFilename(exchange.getIn().getHeader("gridFsFileName").toString());
-
-        // add metadata
-        String metaData = "{'inventoryID':" + sInv + ", 'TPID':'" + TPID + "', 'doc_type':'original', 'status':'initial_save'}";
-        DBObject dbObject = (DBObject) JSON.parse(metaData);
-        gfsFile.setMetaData(dbObject);
+    
+    private class DBCursorFilenameReader extends Reader {
+        DBCursor cursor;
+        StringBuilder current;
+        int pos;
+        
+        DBCursorFilenameReader(DBCursor c) {
+            cursor = c;
+            current = new StringBuilder(4096);
+            pos = 0;
+            fill();
+        }
+        void fill() {
+            if (pos > 0) {
+                current.delete(0, pos);
+                pos = 0;
+            }
+            while (cursor.hasNext() && current.length() < 4000) {
+                DBObject o = cursor.next();
+                current.append(o.get("filename")).append("\n");
+            }
+        }
+        @Override
+        public int read(char[] cbuf, int off, int len) throws IOException {
+            if (pos == current.length()) {
+                fill();
+            }
+            if (pos == current.length()) {
+                return -1;
+            }
+            if (len > (current.length() - pos)) {
+                len = current.length() - pos;
+            }
+            current.getChars(pos, pos + len, cbuf, off);
+            pos += len;
+            return len;
+        }
 
-        // save the input file into mongoDB
-        gfsFile.save();
+        @Override
+        public void close() throws IOException {
+            cursor.close();
+        }
     }
-
 }

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/test/java/org/apache/camel/component/gridfs/AbstractMongoDbTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/camel/component/gridfs/AbstractMongoDbTest.java b/src/test/java/org/apache/camel/component/gridfs/AbstractMongoDbTest.java
new file mode 100644
index 0000000..b1c94b9
--- /dev/null
+++ b/src/test/java/org/apache/camel/component/gridfs/AbstractMongoDbTest.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.gridfs;
+
+
+import com.mongodb.MongoClient;
+import com.mongodb.gridfs.GridFS;
+
+import org.apache.camel.CamelContext;
+import org.apache.camel.component.properties.PropertiesComponent;
+import org.apache.camel.spring.SpringCamelContext;
+import org.apache.camel.test.junit4.CamelTestSupport;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+public abstract class AbstractMongoDbTest extends CamelTestSupport {
+
+    protected static MongoClient mongo;
+    protected static GridFS gridfs;
+
+    protected ApplicationContext applicationContext;
+
+    @SuppressWarnings("deprecation")
+    @Override
+    public void doPostSetup() {
+        mongo = applicationContext.getBean(MongoClient.class);
+        gridfs = new GridFS(mongo.getDB("test"));
+    }
+
+
+    @Override
+    protected CamelContext createCamelContext() throws Exception {
+        applicationContext = new AnnotationConfigApplicationContext(EmbedMongoConfiguration.class);
+        CamelContext ctx = new SpringCamelContext(applicationContext);
+        PropertiesComponent pc = new PropertiesComponent("classpath:mongodb.test.properties");
+        ctx.addComponent("properties", pc);
+        return ctx;
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/test/java/org/apache/camel/component/gridfs/EmbedMongoConfiguration.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/camel/component/gridfs/EmbedMongoConfiguration.java b/src/test/java/org/apache/camel/component/gridfs/EmbedMongoConfiguration.java
new file mode 100644
index 0000000..d755a45
--- /dev/null
+++ b/src/test/java/org/apache/camel/component/gridfs/EmbedMongoConfiguration.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.gridfs;
+
+import java.io.IOException;
+import java.net.UnknownHostException;
+
+import com.mongodb.MongoClient;
+import de.flapdoodle.embed.mongo.MongodExecutable;
+import de.flapdoodle.embed.mongo.MongodStarter;
+import de.flapdoodle.embed.mongo.config.IMongodConfig;
+import de.flapdoodle.embed.mongo.config.MongodConfigBuilder;
+import de.flapdoodle.embed.mongo.config.Net;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import static de.flapdoodle.embed.mongo.distribution.Version.Main.PRODUCTION;
+import static de.flapdoodle.embed.process.runtime.Network.localhostIsIPv6;
+import static org.springframework.util.SocketUtils.findAvailableTcpPort;
+
+@Configuration
+public class EmbedMongoConfiguration {
+
+    private static final int PORT = findAvailableTcpPort();
+
+    static {
+        try {
+            IMongodConfig mongodConfig = new MongodConfigBuilder()
+                    .version(PRODUCTION)
+                    .net(new Net(PORT, localhostIsIPv6()))
+                    .build();
+            MongodExecutable mongodExecutable = MongodStarter.getDefaultInstance().prepare(mongodConfig);
+            mongodExecutable.start();
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Bean
+    public MongoClient myDb() throws UnknownHostException {
+        return new MongoClient("0.0.0.0", PORT);
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerOperationsTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerOperationsTest.java b/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerOperationsTest.java
new file mode 100644
index 0000000..8aaa0de
--- /dev/null
+++ b/src/test/java/org/apache/camel/component/gridfs/GridFsConsumerOperationsTest.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.gridfs;
+
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.builder.RouteBuilder;
+import org.junit.Test;
+
+public class GridFsConsumerOperationsTest extends AbstractMongoDbTest {
+    
+    @Override
+    protected RouteBuilder createRouteBuilder() throws Exception {
+        return new RouteBuilder() {
+            public void configure() {
+                from("direct:create").to("gridfs:myDb?database={{mongodb.testDb}}&operation=create");
+                from("direct:remove").to("gridfs:myDb?database={{mongodb.testDb}}&operation=remove");
+                from("direct:findOne").to("gridfs:myDb?database={{mongodb.testDb}}&operation=findOne");
+                from("direct:listAll").to("gridfs:myDb?database={{mongodb.testDb}}&operation=listAll");
+                from("direct:count").to("gridfs:myDb?database={{mongodb.testDb}}&operation=count");
+                from("direct:headerOp").to("gridfs:myDb?database={{mongodb.testDb}}");
+            }
+        };
+    }
+    
+    @Test
+    public void testOperations() throws Exception {
+        Map<String, Object> headers = new HashMap<String, Object>();
+        String fn = "filename.for.db.txt";
+        assertEquals(0, gridfs.find(fn).size());
+        
+        headers.put(Exchange.FILE_NAME, fn);
+        String data = "This is some stuff to go into the db";
+        template.requestBodyAndHeaders("direct:create", data, headers);
+        assertEquals(1, gridfs.find(fn).size());
+        assertEquals(1, template.requestBodyAndHeaders("direct:count", null, headers));
+        InputStream ins = template.requestBodyAndHeaders("direct:findOne", null, headers, InputStream.class);
+        assertNotNull(ins);
+        byte b[] = new byte[2048];
+        int i = ins.read(b);
+        assertEquals(data, new String(b, 0, i, "utf-8"));
+        
+        headers.put(Exchange.FILE_NAME, "2-" + fn);
+        
+        template.requestBodyAndHeaders("direct:create", data + "data2", headers);
+        assertEquals(1, template.requestBodyAndHeaders("direct:count", null, headers));
+        assertEquals(2, template.requestBody("direct:count", null, Integer.class).intValue());
+        
+        String s = template.requestBody("direct:listAll", null, String.class);
+        assertTrue(s.contains("2-" + fn));
+        template.requestBodyAndHeaders("direct:remove", null, headers);
+        assertEquals(1, template.requestBody("direct:count", null, Integer.class).intValue());
+        s = template.requestBody("direct:listAll", null, String.class);
+        assertFalse(s.contains("2-" + fn));
+    }
+}
+

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/src/test/resources/log4j.properties b/src/test/resources/log4j.properties
new file mode 100644
index 0000000..cb64298
--- /dev/null
+++ b/src/test/resources/log4j.properties
@@ -0,0 +1,37 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+log4j.rootLogger=INFO, file
+# change the logging level of this category to increase verbosity of the MongoDB component
+log4j.category.org.apache.camel.component.mongodb=INFO, file
+log4j.additivity.org.apache.camel.component.mongodb=false
+
+# uncomment the following line to turn on Camel debugging
+#log4j.logger.org.apache.camel=DEBUG
+
+# CONSOLE appender not used by default
+log4j.appender.out=org.apache.log4j.ConsoleAppender
+log4j.appender.out.layout=org.apache.log4j.PatternLayout
+log4j.appender.out.layout.ConversionPattern=[%30.30t] %-30.30c{1} %-5p %m%n
+#log4j.appender.out.layout.ConversionPattern=%d [%-15.15t] %-5p %-30.30c{1} - %m%n
+
+
+# File appender
+log4j.appender.file=org.apache.log4j.FileAppender
+log4j.appender.file.layout=org.apache.log4j.PatternLayout
+log4j.appender.file.layout.ConversionPattern=%d [%-15.15t] %-5p %-30.30c{1} - %m%n
+log4j.appender.file.file=target/camel-mongodb-test.log

http://git-wip-us.apache.org/repos/asf/camel/blob/bfaec784/src/test/resources/mongodb.test.properties
----------------------------------------------------------------------
diff --git a/src/test/resources/mongodb.test.properties b/src/test/resources/mongodb.test.properties
new file mode 100644
index 0000000..20c529d
--- /dev/null
+++ b/src/test/resources/mongodb.test.properties
@@ -0,0 +1,21 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+mongodb.connectionURI=mongodb://localhost:27017
+mongodb.testDb=test
+mongodb.testCollection=camelTest
+mongodb.cappedTestCollection=camelTestCapped
\ No newline at end of file


Mime
View raw message