mnemonic-commits mailing list archives

From ga...@apache.org
Subject incubator-mnemonic git commit: MNEMONIC-182: MneOutputFormat & MneMapreduceRecordWriter MNEMONIC-181: Add a new module for hadoop integration MNEMONIC-187: Add testcase of hadoop mapreduce
Date Tue, 07 Feb 2017 00:35:28 GMT
Repository: incubator-mnemonic
Updated Branches:
  refs/heads/0.5.0-dev 59051d41a -> 12fb0f6c7


MNEMONIC-182: MneOutputFormat & MneMapreduceRecordWriter
MNEMONIC-181: Add a new module for hadoop integration
MNEMONIC-187: Add testcase of hadoop mapreduce


Project: http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/commit/12fb0f6c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/tree/12fb0f6c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/diff/12fb0f6c

Branch: refs/heads/0.5.0-dev
Commit: 12fb0f6c73a9969d5357a75908b8c63d54f98fe2
Parents: 59051d4
Author: Wang, Gang(Gary) <gang1.wang@intel.com>
Authored: Tue Jan 31 13:48:11 2017 -0800
Committer: Wang, Gang(Gary) <gang1.wang@intel.com>
Committed: Mon Feb 6 16:26:04 2017 -0800

----------------------------------------------------------------------
 .../mnemonic-hadoop-mapreduce/pom.xml           |  66 +++++++
 .../apache/mnemonic/hadoop/MneConfigHelper.java | 191 ++++++++++++++++++
 .../hadoop/mapreduce/MneInputFormat.java        |  52 +++++
 .../mapreduce/MneMapreduceRecordReader.java     | 101 ++++++++++
 .../mapreduce/MneMapreduceRecordWriter.java     | 195 +++++++++++++++++++
 .../hadoop/mapreduce/MneOutputFormat.java       |  41 ++++
 .../src/main/resources/log4j.properties         |  33 ++++
 .../mnemonic/mapreduce/MneMapreduceIOTest.java  | 131 +++++++++++++
 .../org/apache/mnemonic/mapreduce/Person.java   | 109 +++++++++++
 .../mnemonic/mapreduce/PersonListEFProxy.java   |  40 ++++
 .../src/test/resources/testng.xml               |  34 ++++
 mnemonic-hadoop/pom.xml                         | 179 +++++++++++++++++
 pom.xml                                         |   1 +
 13 files changed, 1173 insertions(+)
----------------------------------------------------------------------
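
The new module plugs Mnemonic durable objects into the standard Hadoop
mapreduce job lifecycle. As a hedged sketch of how a driver might wire the
new output format into a job (the driver class, job name, and output path
are hypothetical, not part of this commit; the config values mirror the
bundled MneMapreduceIOTest):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.mnemonic.DurableType;
    import org.apache.mnemonic.hadoop.MneConfigHelper;
    import org.apache.mnemonic.hadoop.mapreduce.MneOutputFormat;
    import org.apache.mnemonic.mapreduce.PersonListEFProxy; // test-source proxy

    public class MneJobDriver {                             // hypothetical driver
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Mirror the output-side settings used by the bundled test case.
        MneConfigHelper.setOutputMemServiceName(conf, "pmalloc");
        MneConfigHelper.setOutputSlotKeyId(conf, 3L);
        MneConfigHelper.setOutputDurableTypes(conf, new DurableType[] {DurableType.DURABLE});
        MneConfigHelper.setOutputEntityFactoryProxies(conf,
            new Class<?>[] {PersonListEFProxy.class});

        Job job = Job.getInstance(conf, "mne-demo");        // hypothetical job name
        job.setOutputFormatClass(MneOutputFormat.class);
        FileOutputFormat.setOutputPath(job, new Path(args[0]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
      }
    }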


http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/12fb0f6c/mnemonic-hadoop/mnemonic-hadoop-mapreduce/pom.xml
----------------------------------------------------------------------
diff --git a/mnemonic-hadoop/mnemonic-hadoop-mapreduce/pom.xml b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/pom.xml
new file mode 100644
index 0000000..8273fcf
--- /dev/null
+++ b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/pom.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.mnemonic</groupId>
+    <artifactId>mnemonic-hadoop</artifactId>
+    <version>0.4.0-incubating-SNAPSHOT</version>
+  </parent>
+
+  <artifactId>mnemonic-hadoop-mapreduce</artifactId>
+  <name>mnemonic-hadoop-mapreduce</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <test.tmp.dir>${project.build.directory}/testing-tmp</test.tmp.dir>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.testng</groupId>
+      <artifactId>testng</artifactId>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+      </plugin>
+      <plugin>
+        <groupId>org.bsc.maven</groupId>
+        <artifactId>maven-processor-plugin</artifactId>
+      </plugin>
+    </plugins>
+  </build>
+
+  <profiles>
+  </profiles>
+
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/12fb0f6c/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/MneConfigHelper.java
----------------------------------------------------------------------
diff --git a/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/MneConfigHelper.java b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/MneConfigHelper.java
new file mode 100644
index 0000000..7bbcddb
--- /dev/null
+++ b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/MneConfigHelper.java
@@ -0,0 +1,191 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.mnemonic.hadoop;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.mnemonic.ConfigurationException;
+import org.apache.mnemonic.DurableType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A configuration helper for reading and writing Mnemonic settings in a Hadoop Configuration.
+ */
+public class MneConfigHelper {
+
+  private static final String INPUT_DURABLE_TYPES_CONFIG = "mnemonic.input.durable.types";
+  private static final String INPUT_ENTITY_FACTORY_PROXIES = "mnemonic.input.entity.factory.proxies.class";
+  private static final String OUTPUT_DURABLE_TYPES_CONFIG = "mnemonic.output.durable.types";
+  private static final String OUTPUT_ENTITY_FACTORY_PROXIES = "mnemonic.output.entity.factory.proxies.class";
+  private static final String INPUT_SLOT_KEY_ID = "mnemonic.input.slot.key.id";
+  private static final String OUTPUT_SLOT_KEY_ID = "mnemonic.output.slot.key.id";
+  private static final String INPUT_MEM_SERVICE_NAME = "mnemonic.input.mem.service.name";
+  private static final String OUTPUT_MEM_SERVICE_NAME = "mnemonic.output.mem.service.name";
+  private static final String OUTPUT_MEM_POOL_SIZE = "mnemonic.output.mem.pool.size";
+//  private static final String RECORD_FACTORY_CLASS = "mnemonic.record_factory_class";
+  private static final long DEFAULT_OUTPUT_MEM_POOL_SIZE = 1024L * 1024 * 1024 * 4;
+  public static final String FILE_EXTENSION = ".mne";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(MneConfigHelper.class);
+
+  public static void setInputDurableTypes(Configuration conf, DurableType[] dtypes) {
+    String val = StringUtils.join(dtypes, ",");
+    conf.set(INPUT_DURABLE_TYPES_CONFIG, val);
+  }
+
+  public static DurableType[] getInputDurableTypes(Configuration conf) {
+    List<DurableType> ret = new ArrayList<>();
+    String val = conf.get(INPUT_DURABLE_TYPES_CONFIG);
+    String[] vals = StringUtils.split(val, ",");
+    for (String itm : vals) {
+      ret.add(DurableType.valueOf(itm));
+    }
+    return ret.toArray(new DurableType[0]);
+  }
+
+  public static void setOutputDurableTypes(Configuration conf, DurableType[] dtypes) {
+    String val = StringUtils.join(dtypes, ",");
+    conf.set(OUTPUT_DURABLE_TYPES_CONFIG, val);
+  }
+
+  public static DurableType[] getOutputDurableTypes(Configuration conf) {
+    List<DurableType> ret = new ArrayList<>();
+    String val = conf.get(OUTPUT_DURABLE_TYPES_CONFIG);
+    String[] vals = StringUtils.split(val, ",");
+    for (String itm : vals) {
+      ret.add(DurableType.valueOf(itm));
+    }
+    return ret.toArray(new DurableType[0]);
+  }
+
+  public static void setInputEntityFactoryProxies(Configuration conf, Class<?>[] proxies) {
+    List<String> vals = new ArrayList<>();
+    for (Class<?> itm : proxies) {
+      vals.add(itm.getName());
+    }
+    conf.setStrings(INPUT_ENTITY_FACTORY_PROXIES, vals.toArray(new String[0]));
+  }
+
+  public static Class<?>[] getInputEntityFactoryProxies(Configuration conf) {
+    List<Class<?>> ret = new ArrayList<>();
+    String[] vals = conf.getStrings(INPUT_ENTITY_FACTORY_PROXIES);
+    String clsname = null;
+    try {
+      for (String itm : vals) {
+        clsname = itm;
+        ret.add(Class.forName(itm));
+      }
+    } catch (ClassNotFoundException | NoClassDefFoundError e) {
+      throw new RuntimeException(String.format("Unable to find class '%s'", clsname), e);
+    }
+    return ret.toArray(new Class<?>[0]);
+  }
+
+  public static void setOutputEntityFactoryProxies(Configuration conf, Class<?>[] proxies) {
+    List<String> vals = new ArrayList<>();
+    for (Class<?> itm : proxies) {
+      vals.add(itm.getName());
+    }
+    conf.setStrings(OUTPUT_ENTITY_FACTORY_PROXIES, vals.toArray(new String[0]));
+  }
+
+  public static Class<?>[] getOutputEntityFactoryProxies(Configuration conf) {
+    List<Class<?>> ret = new ArrayList<>();
+    String[] vals = conf.getStrings(OUTPUT_ENTITY_FACTORY_PROXIES);
+    String clsname = null;
+    try {
+      for (String itm : vals) {
+        clsname = itm;
+        ret.add(Class.forName(itm));
+      }
+    } catch (ClassNotFoundException | NoClassDefFoundError e) {
+      throw new RuntimeException(String.format("Unable to find class '%s'", clsname), e);
+    }
+    return ret.toArray(new Class<?>[0]);
+  }
+
+  public static void setInputSlotKeyId(Configuration conf, long keyid) {
+    conf.setLong(INPUT_SLOT_KEY_ID, keyid);
+  }
+
+  public static long getInputSlotKeyId(Configuration conf) {
+    return conf.getLong(INPUT_SLOT_KEY_ID, 0L);
+  }
+
+  public static void setOutputSlotKeyId(Configuration conf, long keyid) {
+    conf.setLong(OUTPUT_SLOT_KEY_ID, keyid);
+  }
+
+  public static long getOutputSlotKeyId(Configuration conf) {
+    return conf.getLong(OUTPUT_SLOT_KEY_ID, 0L);
+  }
+
+  public static void setInputMemServiceName(Configuration conf, String name) {
+    conf.set(INPUT_MEM_SERVICE_NAME, name);
+  }
+
+  public static String getInputMemServiceName(Configuration conf) {
+    String ret = conf.get(INPUT_MEM_SERVICE_NAME);
+    if (null == ret) {
+      throw new ConfigurationException("You must set the input mem service name");
+    }
+    return ret;
+  }
+
+  public static void setOutputMemServiceName(Configuration conf, String name) {
+    conf.set(OUTPUT_MEM_SERVICE_NAME, name);
+  }
+
+  public static String getOutputMemServiceName(Configuration conf) {
+    String ret = conf.get(OUTPUT_MEM_SERVICE_NAME);
+    if (null == ret) {
+      throw new ConfigurationException("You must set the output mem service name");
+    }
+    return ret;
+  }
+
+  public static void setOutputMemPoolSize(Configuration conf, long size) {
+    conf.setLong(OUTPUT_MEM_POOL_SIZE, size);
+  }
+
+  public static long getOutputMemPoolSize(Configuration conf) {
+    return conf.getLong(OUTPUT_MEM_POOL_SIZE, DEFAULT_OUTPUT_MEM_POOL_SIZE);
+  }
+
+//
+//  public static Class<?> getRecordFactory(Configuration conf) {
+//    Class<?> ret;
+//    String clsname = conf.get(RECORD_FACTORY_CLASS);
+//    try {
+//      ret = Class.forName(clsname);
+//    } catch (ClassNotFoundException | NoClassDefFoundError e) {
+//      throw new RuntimeException(String.format("Unable to find record factory class '%s'", clsname), e);
+//    }
+//    return ret;
+//  }
+//
+//  public static void setRecordFactory(Configuration conf, Class<?> recf) {
+//    conf.setStrings(RECORD_FACTORY_CLASS, recf.getName());
+//  }
+
+}
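
The helper simply round-trips typed settings through a plain Hadoop
Configuration; a minimal standalone sketch of the set/get symmetry (the
class name is a placeholder, not part of the commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.mnemonic.DurableType;
    import org.apache.mnemonic.hadoop.MneConfigHelper;

    public class ConfigRoundTrip {                          // hypothetical snippet holder
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        MneConfigHelper.setOutputMemPoolSize(conf, 1024L * 1024 * 1024 * 2); // 2 GB pool
        MneConfigHelper.setOutputDurableTypes(conf, new DurableType[] {DurableType.DURABLE});

        // Durable types are stored comma-joined and parsed back through
        // DurableType.valueOf(), so the names must match the enum constants.
        DurableType[] types = MneConfigHelper.getOutputDurableTypes(conf);
        long poolsz = MneConfigHelper.getOutputMemPoolSize(conf); // falls back to the 4 GB default when unset
        System.out.printf("types=%d poolsz=%d%n", types.length, poolsz);
      }
    }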

http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/12fb0f6c/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/mapreduce/MneInputFormat.java
----------------------------------------------------------------------
diff --git a/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/mapreduce/MneInputFormat.java b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/mapreduce/MneInputFormat.java
new file mode 100644
index 0000000..b03adcd
--- /dev/null
+++ b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/mapreduce/MneInputFormat.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.mnemonic.hadoop.mapreduce;
+
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.fs.Path;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.NullWritable;
+
+/**
+ * A Mnemonic input format that satisfies the org.apache.hadoop.mapreduce API.
+ */
+public class MneInputFormat<V>
+    extends FileInputFormat<NullWritable, V> {
+
+  @Override
+  protected boolean isSplitable(JobContext context, Path filename) {
+    return false;
+  }
+
+  @Override
+  public RecordReader<NullWritable, V>
+      createRecordReader(InputSplit inputSplit,
+                         TaskAttemptContext taskAttemptContext
+                         ) throws IOException, InterruptedException {
+    MneMapreduceRecordReader<V> reader = new MneMapreduceRecordReader<V>();
+    reader.initialize(inputSplit, taskAttemptContext);
+    return reader;
+  }
+}
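
Since isSplitable() returns false, each memory-pool file maps to exactly
one split. Read-side driver wiring follows the usual FileInputFormat
pattern; a hedged sketch (the helper class is hypothetical, and the input
settings mirror the bundled test):

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.mnemonic.DurableType;
    import org.apache.mnemonic.hadoop.MneConfigHelper;
    import org.apache.mnemonic.hadoop.mapreduce.MneInputFormat;
    import org.apache.mnemonic.mapreduce.PersonListEFProxy; // test-source proxy

    public class ReadSideWiring {                           // hypothetical helper
      public static void configure(Job job, Path in) throws Exception {
        MneConfigHelper.setInputMemServiceName(job.getConfiguration(), "pmalloc");
        MneConfigHelper.setInputSlotKeyId(job.getConfiguration(), 3L);
        MneConfigHelper.setInputDurableTypes(job.getConfiguration(),
            new DurableType[] {DurableType.DURABLE});
        MneConfigHelper.setInputEntityFactoryProxies(job.getConfiguration(),
            new Class<?>[] {PersonListEFProxy.class});
        job.setInputFormatClass(MneInputFormat.class);
        FileInputFormat.addInputPath(job, in);              // one unsplit split per pool file
      }
    }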

http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/12fb0f6c/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/mapreduce/MneMapreduceRecordReader.java
----------------------------------------------------------------------
diff --git a/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/mapreduce/MneMapreduceRecordReader.java b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/mapreduce/MneMapreduceRecordReader.java
new file mode 100644
index 0000000..2de2e64
--- /dev/null
+++ b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/mapreduce/MneMapreduceRecordReader.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.mnemonic.hadoop.mapreduce;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+import org.apache.mnemonic.DurableType;
+import org.apache.mnemonic.EntityFactoryProxy;
+import org.apache.mnemonic.NonVolatileMemAllocator;
+import org.apache.mnemonic.Utils;
+import org.apache.mnemonic.collections.DurableSinglyLinkedList;
+import org.apache.mnemonic.collections.DurableSinglyLinkedListFactory;
+import org.apache.mnemonic.hadoop.MneConfigHelper;
+
+/**
+ * This record reader implements the org.apache.hadoop.mapreduce API.
+ * @param <V> the type of the data item
+ */
+public class MneMapreduceRecordReader<V>
+    extends org.apache.hadoop.mapreduce.RecordReader<NullWritable, V> {
+
+  protected Configuration m_conf;
+  protected TaskAttemptContext m_context;
+  protected NonVolatileMemAllocator m_act;
+  protected Iterator<V> m_iter;
+  protected long m_slotkeyid;
+  protected DurableType[] m_gtypes;
+  protected EntityFactoryProxy[] m_efproxies;
+  protected String m_msvrname;
+
+  public MneMapreduceRecordReader() {
+  }
+
+  @Override
+  public void close() throws IOException {
+    m_act.close();
+  }
+
+  @Override
+  public void initialize(InputSplit inputSplit,
+                         TaskAttemptContext context) {
+    FileSplit split = (FileSplit) inputSplit;
+    m_context = context;
+    m_conf = m_context.getConfiguration();
+    m_msvrname = MneConfigHelper.getInputMemServiceName(m_conf);
+    m_gtypes = MneConfigHelper.getInputDurableTypes(m_conf);
+    m_efproxies = Utils.instantiateEntityFactoryProxies(
+        MneConfigHelper.getInputEntityFactoryProxies(m_conf));
+    m_slotkeyid = MneConfigHelper.getInputSlotKeyId(m_conf);
+
+    DurableSinglyLinkedList<V> dsllist;
+
+    m_act = new NonVolatileMemAllocator(Utils.getNonVolatileMemoryAllocatorService(m_msvrname), 1024000L,
+        split.getPath().toString(), true);
+    long handler = m_act.getHandler(m_slotkeyid);
+    dsllist = DurableSinglyLinkedListFactory.restore(m_act, m_efproxies, 
+        m_gtypes, handler, false);
+    m_iter = dsllist.iterator();
+  }
+
+  @Override
+  public boolean nextKeyValue() throws IOException, InterruptedException {
+    return m_iter.hasNext();
+  }
+
+  @Override
+  public NullWritable getCurrentKey() throws IOException, InterruptedException {
+    return NullWritable.get();
+  }
+
+  @Override
+  public V getCurrentValue() throws IOException, InterruptedException {
+    return m_iter.next();
+  }
+
+  @Override
+  public float getProgress() throws IOException {
+    return 0.5f; /* TBD */
+  }
+}
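
A mapper consuming this reader sees NullWritable keys and restored durable
values; a hedged sketch (AgeSumMapper is hypothetical, and the Person
entity comes from the test sources):

    import java.io.IOException;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.mnemonic.mapreduce.Person;            // test-source entity

    public class AgeSumMapper                               // hypothetical mapper
        extends Mapper<NullWritable, Person<Long>, NullWritable, LongWritable> {
      @Override
      protected void map(NullWritable key, Person<Long> person, Context ctx)
          throws IOException, InterruptedException {
        // Values come straight off the restored DurableSinglyLinkedList:
        // getCurrentValue() is simply the list iterator's next().
        ctx.write(NullWritable.get(), new LongWritable(person.getAge()));
      }
    }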

http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/12fb0f6c/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/mapreduce/MneMapreduceRecordWriter.java
----------------------------------------------------------------------
diff --git a/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/mapreduce/MneMapreduceRecordWriter.java b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/mapreduce/MneMapreduceRecordWriter.java
new file mode 100644
index 0000000..42e4a55
--- /dev/null
+++ b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/mapreduce/MneMapreduceRecordWriter.java
@@ -0,0 +1,195 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.mnemonic.hadoop.mapreduce;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+import org.apache.commons.lang3.tuple.Pair;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.mnemonic.ConfigurationException;
+import org.apache.mnemonic.Durable;
+import org.apache.mnemonic.DurableType;
+import org.apache.mnemonic.EntityFactoryProxy;
+import org.apache.mnemonic.NonVolatileMemAllocator;
+import org.apache.mnemonic.OutOfHybridMemory;
+import org.apache.mnemonic.Utils;
+import org.apache.mnemonic.hadoop.MneConfigHelper;
+import org.apache.mnemonic.collections.DurableSinglyLinkedList;
+import org.apache.mnemonic.collections.DurableSinglyLinkedListFactory;
+
+public class MneMapreduceRecordWriter<V> extends RecordWriter<NullWritable, V> {
+
+  protected Configuration m_conf;
+  protected TaskAttemptContext m_context;
+  protected NonVolatileMemAllocator m_act;
+  protected Iterator<V> m_iter;
+  protected long m_poolsz;
+  protected long m_slotkeyid;
+  protected DurableType[] m_gtypes;
+  protected EntityFactoryProxy[] m_efproxies;
+  protected String m_msvrname;
+  protected long m_poolidx = 0;
+  protected String m_outbname;
+  protected String m_outext;
+  protected Map<V, DurableSinglyLinkedList<V>> m_recordmap;
+  protected boolean m_newpool;
+  protected Pair<DurableType[], EntityFactoryProxy[]> m_recparmpair;
+  protected DurableSinglyLinkedList<V> m_listnode;
+
+  public MneMapreduceRecordWriter(TaskAttemptContext context, String outbname, String extension) {
+    this(context.getConfiguration());
+    m_context = context;
+    m_outbname = outbname;
+    m_outext = extension;
+    initNextPool();
+  }
+
+  protected MneMapreduceRecordWriter(Configuration conf) {
+    m_conf = conf;
+    m_msvrname = MneConfigHelper.getOutputMemServiceName(m_conf);
+    m_gtypes = MneConfigHelper.getOutputDurableTypes(m_conf);
+    m_efproxies = Utils.instantiateEntityFactoryProxies(MneConfigHelper.getOutputEntityFactoryProxies(m_conf));
+    m_recparmpair = Utils.shiftDurableParams(m_gtypes, m_efproxies, 1);
+    m_slotkeyid = MneConfigHelper.getOutputSlotKeyId(m_conf);
+    m_poolsz = MneConfigHelper.getOutputMemPoolSize(conf);
+    m_recordmap = new HashMap<V, DurableSinglyLinkedList<V>>();
+    if (m_gtypes.length < 1) {
+      throw new ConfigurationException("The durable type of record parameters does not exist");
+    } else {
+      if (DurableType.DURABLE == m_gtypes[0]
+          && m_efproxies.length < 1) { /* T.B.D. BUFFER & CHUNK */
+        throw new ConfigurationException("The durable entity proxy of record parameters does not exist");
+      }
+    }
+  }
+
+  protected Path genNextPoolPath() {
+    Path ret = new Path(FileOutputFormat.getOutputPath(m_context),
+        FileOutputFormat.getUniqueFile(m_context, String.format("%s-%05d", m_outbname, ++m_poolidx), m_outext));
+    return ret;
+  }
+
+  protected void initNextPool() {
+    if (m_act != null) {
+      m_act.close();
+    }
+    Path outpath = genNextPoolPath();
+    m_act = new NonVolatileMemAllocator(Utils.getNonVolatileMemoryAllocatorService(m_msvrname), m_poolsz,
+        outpath.toString(), true);
+    m_newpool = true;
+  }
+
+  public NonVolatileMemAllocator getAllocator() {
+    return m_act;
+  }
+
+  @SuppressWarnings("unchecked")
+  protected V createDurableObjectRecord() {
+    V ret = null;
+    ret = (V) m_efproxies[0].create(m_act, m_recparmpair.getRight(), m_recparmpair.getLeft(), false);
+    return ret;
+  }
+
+  public V newDurableObjectRecord() {
+    V ret = null;
+    DurableSinglyLinkedList<V> nv = null;
+    try {
+      nv = createDurableNode();
+      ret = createDurableObjectRecord();
+    } catch (OutOfHybridMemory e) {
+      if (nv != null) {
+        nv.destroy();
+      }
+      if (ret != null) {
+        ((Durable) ret).destroy();
+      }
+      initNextPool();
+      try { /* retry */
+        nv = createDurableNode();
+        ret = createDurableObjectRecord();
+      } catch (OutOfHybridMemory ee) {
+        if (nv != null) {
+          nv.destroy();
+        }
+        if (ret != null) {
+          ((Durable) ret).destroy();
+        }
+      }
+    }
+    if (ret != null) {
+      m_recordmap.put(ret, nv);
+    }
+    return ret;
+  }
+
+  protected DurableSinglyLinkedList<V> createDurableNode() {
+    DurableSinglyLinkedList<V> ret = null;
+    ret = DurableSinglyLinkedListFactory.create(m_act, m_efproxies, m_gtypes, false);
+    return ret;
+  }
+
+  @Override
+  public void write(NullWritable nullWritable, V v) throws IOException {
+    DurableSinglyLinkedList<V> nv = null;
+    if (null == v) {
+      return;
+    }
+    if (DurableType.DURABLE == m_gtypes[0]) {
+      if (m_recordmap.containsKey(v)) {
+        nv = m_recordmap.remove(v);
+      } else {
+        throw new RuntimeException("The record hasn't been created by newDurableObjectRecord()");
+      }
+    } else {
+      try {
+        nv = createDurableNode();
+      } catch (OutOfHybridMemory e) {
+        initNextPool();
+        nv = createDurableNode();
+      }
+    }
+    if (nv != null) {
+      nv.setItem(v, false);
+    }
+    if (m_newpool) {
+      m_act.setHandler(m_slotkeyid, nv.getHandler());
+      m_newpool = false;
+    } else {
+      m_listnode.setNext(nv, false);
+    }
+    m_listnode = nv;
+  }
+
+  @Override
+  public void close(TaskAttemptContext taskAttemptContext) throws IOException {
+    for (V k : m_recordmap.keySet()) {
+      m_recordmap.get(k).destroy();
+      ((Durable) k).destroy();
+    }
+    m_act.close();
+  }
+}
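
Note the write protocol for DURABLE values: each record must be allocated
through newDurableObjectRecord() so the writer can pair it with its list
node, otherwise write() throws. A sketch mirroring the bundled test (the
helper class, taskContext parameter, and field values are placeholders):

    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.mapreduce.RecordWriter;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.mnemonic.hadoop.mapreduce.MneMapreduceRecordWriter;
    import org.apache.mnemonic.hadoop.mapreduce.MneOutputFormat;
    import org.apache.mnemonic.mapreduce.Person;            // test-source entity

    public class WriterSketch {                             // hypothetical helper
      public static void writeOne(TaskAttemptContext taskContext) throws Exception {
        RecordWriter<NullWritable, Person<Long>> rw =
            new MneOutputFormat<Person<Long>>().getRecordWriter(taskContext);
        MneMapreduceRecordWriter<Person<Long>> writer =
            (MneMapreduceRecordWriter<Person<Long>>) rw;

        Person<Long> p = writer.newDurableObjectRecord();   // allocated in the current pool;
                                                            // rolls over on OutOfHybridMemory
        p.setAge((short) 30);                               // placeholder values
        p.setName("Alice", true);
        writer.write(NullWritable.get(), p);                // links the record's node into the list
        writer.close(taskContext);                          // destroys allocated-but-unwritten records
      }
    }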

http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/12fb0f6c/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/mapreduce/MneOutputFormat.java
----------------------------------------------------------------------
diff --git a/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/mapreduce/MneOutputFormat.java b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/mapreduce/MneOutputFormat.java
new file mode 100644
index 0000000..143243c
--- /dev/null
+++ b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/mapreduce/MneOutputFormat.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.mnemonic.hadoop.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.mnemonic.hadoop.MneConfigHelper;
+
+/**
+ * A Mnemonic output format that satisfies the org.apache.hadoop.mapreduce API.
+ */
+public class MneOutputFormat<V extends Writable> extends FileOutputFormat<NullWritable, V> {
+
+  @Override
+  public RecordWriter<NullWritable, V> getRecordWriter(TaskAttemptContext taskAttemptContext) throws IOException {
+    return new MneMapreduceRecordWriter<V>(taskAttemptContext, getOutputName(taskAttemptContext),
+        MneConfigHelper.FILE_EXTENSION);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/12fb0f6c/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/resources/log4j.properties b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/resources/log4j.properties
new file mode 100644
index 0000000..24b0e3c
--- /dev/null
+++ b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/resources/log4j.properties
@@ -0,0 +1,33 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Root logger option
+log4j.rootLogger=INFO, file, stdout
+
+#to stdout
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+
+#to log.log file
+log4j.appender.file=org.apache.log4j.RollingFileAppender
+log4j.appender.file.File=log.log
+log4j.appender.file.MaxFileSize=10MB
+log4j.appender.file.MaxBackupIndex=10
+log4j.appender.file.layout=org.apache.log4j.PatternLayout
+log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n

http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/12fb0f6c/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/MneMapreduceIOTest.java
----------------------------------------------------------------------
diff --git a/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/MneMapreduceIOTest.java b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/MneMapreduceIOTest.java
new file mode 100644
index 0000000..47b3c75
--- /dev/null
+++ b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/MneMapreduceIOTest.java
@@ -0,0 +1,131 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.mnemonic.mapreduce;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Random;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.InputFormat;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+import org.apache.mnemonic.DurableType;
+import org.apache.mnemonic.Utils;
+import org.apache.mnemonic.hadoop.MneConfigHelper;
+import org.apache.mnemonic.hadoop.mapreduce.MneInputFormat;
+import org.apache.mnemonic.hadoop.mapreduce.MneMapreduceRecordWriter;
+import org.apache.mnemonic.hadoop.mapreduce.MneOutputFormat;
+import org.testng.AssertJUnit;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+public class MneMapreduceIOTest {
+  private Path m_workdir;
+  private JobConf m_conf;
+  private FileSystem m_fs;
+  private Random m_rand;
+  private TaskAttemptID m_taid;
+  private TaskAttemptContext m_tacontext;
+  private long m_reccnt = 5000L;
+  private long m_sumage = 0L;
+
+  @BeforeClass
+  public void setUp() throws IOException {
+    m_workdir = new Path(
+        System.getProperty("test.tmp.dir", "target" + File.separator + "test" + File.separator + "tmp"));
+    m_conf = new JobConf();
+    m_rand = Utils.createRandom();
+
+    try {
+      m_fs = FileSystem.getLocal(m_conf).getRaw();
+      m_fs.delete(m_workdir, true);
+      m_fs.mkdirs(m_workdir);
+    } catch (IOException e) {
+      throw new IllegalStateException("bad fs init", e);
+    }
+
+    m_taid = new TaskAttemptID("jt", 0, TaskType.MAP, 0, 0);
+    m_tacontext = new TaskAttemptContextImpl(m_conf, m_taid);
+
+    m_conf.set("mapreduce.output.fileoutputformat.outputdir", m_workdir.toString());
+
+    MneConfigHelper.setInputMemServiceName(m_conf, "pmalloc");
+    MneConfigHelper.setInputSlotKeyId(m_conf, 3L);
+    MneConfigHelper.setInputDurableTypes(m_conf, new DurableType[] {DurableType.DURABLE});
+    MneConfigHelper.setInputEntityFactoryProxies(m_conf, new Class<?>[] {PersonListEFProxy.class});
+    MneConfigHelper.setOutputMemServiceName(m_conf, "pmalloc");
+    MneConfigHelper.setOutputSlotKeyId(m_conf, 3L);
+    MneConfigHelper.setOutputMemPoolSize(m_conf, 1024L * 1024 * 1024 * 4);
+    MneConfigHelper.setOutputDurableTypes(m_conf, new DurableType[] {DurableType.DURABLE});
+    MneConfigHelper.setOutputEntityFactoryProxies(m_conf, new Class<?>[] {PersonListEFProxy.class});
+
+  }
+
+  @AfterClass
+  public void tearDown() {
+
+  }
+
+  @Test(enabled = true)
+  public void testWritePersonData() throws Exception {
+    NullWritable nada = NullWritable.get();
+    OutputFormat<NullWritable, Person<Long>> outputFormat = new MneOutputFormat<Person<Long>>();
+    RecordWriter<NullWritable, Person<Long>> writer = outputFormat.getRecordWriter(m_tacontext);
+    Person<Long> person = null;
+    for (int i = 0; i < m_reccnt; ++i) {
+      person = ((MneMapreduceRecordWriter<Person<Long>>) writer).newDurableObjectRecord();
+      person.setAge((short) m_rand.nextInt(50));
+      person.setName(String.format("Name: [%s]", Utils.genRandomString()), true);
+      m_sumage += person.getAge();
+      writer.write(nada, person);
+    }
+    writer.close(m_tacontext);
+  }
+
+  @Test(enabled = true, dependsOnMethods = { "testWritePersonData" })
+  public void testReadPersonData() throws Exception {
+    long sumage = 0L;
+    FileSplit split = new FileSplit(
+        new Path(m_workdir,
+            String.format("part-00001-m-00000%s", MneConfigHelper.FILE_EXTENSION)), 0, 0L, new String[0]);
+    InputFormat<NullWritable, Person<Long>> inputFormat = new MneInputFormat<Person<Long>>();
+    RecordReader<NullWritable, Person<Long>> reader = inputFormat.createRecordReader(split, m_tacontext);
+    Person<Long> person = null;
+    for (int i = 0; i < m_reccnt; ++i) {
+      AssertJUnit.assertTrue(reader.nextKeyValue());
+      person = reader.getCurrentValue();
+      AssertJUnit.assertTrue(person.getAge() < 51);
+      sumage += person.getAge();
+    }
+    AssertJUnit.assertEquals(m_sumage, sumage);
+    reader.close();
+    System.out.println(String.format("The sum of ages is %d", sumage));
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/12fb0f6c/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/Person.java
----------------------------------------------------------------------
diff --git a/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/Person.java b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/Person.java
new file mode 100644
index 0000000..17dbc5c
--- /dev/null
+++ b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/Person.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.mnemonic.mapreduce;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.io.Writable;
+import org.apache.mnemonic.Durable;
+import org.apache.mnemonic.EntityFactoryProxy;
+import org.apache.mnemonic.DurableEntity;
+import org.apache.mnemonic.DurableGetter;
+import org.apache.mnemonic.DurableSetter;
+import org.apache.mnemonic.OutOfHybridMemory;
+import org.apache.mnemonic.RetrieveDurableEntityError;
+import org.apache.mnemonic.DurableType;
+
+/**
+ * A simple generic durable entity used for testing and demonstration.
+ *
+ */
+
+@DurableEntity
+public abstract class Person<E> implements Durable, Comparable<Person<E>>, Writable {
+  E element;
+
+  @Override
+  public void initializeAfterCreate() {
+    System.out.println("Initializing After Created");
+  }
+
+  @Override
+  public void initializeAfterRestore() {
+    System.out.println("Initializing After Restored");
+  }
+
+  @Override
+  public void setupGenericInfo(EntityFactoryProxy[] efproxies, DurableType[] gftypes) {
+
+  }
+
+  public void testOutput() throws RetrieveDurableEntityError {
+    System.out.printf("Person %s, Age: %d ( %s ) \n", getName(), getAge(),
+        null == getMother() ? "No Recorded Mother" : "Has Recorded Mother");
+  }
+
+  public void testOutputAge() throws RetrieveDurableEntityError {
+    System.out.printf("(Person %s, Age: %d) ", getName(), getAge());
+  }
+
+  public int compareTo(Person<E> anotherPerson) {
+    int ret = 0;
+    if (0 == ret) {
+      ret = ((Short) getAge()).compareTo(anotherPerson.getAge());
+    }
+    if (0 == ret) {
+      ret = getName().compareTo(anotherPerson.getName());
+    }
+    return ret;
+  }
+
+  public void write(DataOutput out) throws IOException {
+
+  }
+
+  public void readFields(DataInput in) throws IOException {
+
+  }
+
+  @DurableGetter(Id = 1L)
+  public abstract short getAge();
+
+  @DurableSetter
+  public abstract void setAge(short age);
+
+  @DurableGetter(Id = 2L)
+  public abstract String getName() throws RetrieveDurableEntityError;
+
+  @DurableSetter
+  public abstract void setName(String name, boolean destroy) throws OutOfHybridMemory, RetrieveDurableEntityError;
+
+  @DurableGetter(Id = 3L)
+  public abstract Person<E> getMother() throws RetrieveDurableEntityError;
+
+  @DurableSetter
+  public abstract void setMother(Person<E> mother, boolean destroy) throws RetrieveDurableEntityError;
+
+  @DurableGetter(Id = 4L)
+  public abstract Person<E> getFather() throws RetrieveDurableEntityError;
+
+  @DurableSetter
+  public abstract void setFather(Person<E> mother, boolean destroy) throws RetrieveDurableEntityError;
+}
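
The @DurableEntity annotation drives the maven-processor-plugin (wired in
mnemonic-hadoop/pom.xml below) to generate a concrete factory at build
time. Assuming the generated name PersonFactory, as referenced by
PersonListEFProxy, direct use looks roughly like:

    // Sketch only: "allocator" is a NonVolatileMemAllocator, and "proxies"/"types"
    // are the EntityFactoryProxy[] / DurableType[] configured elsewhere.
    Person<Long> person = PersonFactory.create(allocator, proxies, types, false);
    person.setAge((short) 40);          // placeholder values
    person.setName("Smith", true);
    person.testOutput();                // prints name, age, and mother presence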

http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/12fb0f6c/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/PersonListEFProxy.java
----------------------------------------------------------------------
diff --git a/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/PersonListEFProxy.java b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/PersonListEFProxy.java
new file mode 100644
index 0000000..7b2ea71
--- /dev/null
+++ b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/PersonListEFProxy.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.mnemonic.mapreduce;
+
+
+import org.apache.mnemonic.DurableType;
+import org.apache.mnemonic.EntityFactoryProxy;
+import org.apache.mnemonic.RestorableAllocator;
+
+public class PersonListEFProxy implements EntityFactoryProxy {
+  @Override
+  public <A extends RestorableAllocator<A>> Person<Long> restore(
+      A allocator, EntityFactoryProxy[] factoryproxys,
+      DurableType[] gfields, long phandler, boolean autoreclaim) {
+    return PersonFactory.restore(allocator, factoryproxys, gfields, phandler, autoreclaim);
+  }
+  @Override
+  public <A extends RestorableAllocator<A>> Person<Long> create(
+      A allocator, EntityFactoryProxy[] factoryproxys,
+      DurableType[] gfields, boolean autoreclaim) {
+    return PersonFactory.create(allocator, factoryproxys, gfields, autoreclaim);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/12fb0f6c/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/resources/testng.xml
----------------------------------------------------------------------
diff --git a/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/resources/testng.xml b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/resources/testng.xml
new file mode 100644
index 0000000..3dcc372
--- /dev/null
+++ b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/resources/testng.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+<!DOCTYPE suite SYSTEM "http://testng.org/testng-1.0.dtd">
+<suite name="Suite" verbose="1" parallel="tests" thread-count="1">
+  <test name="Test">
+    <classes>
+      <class name="org.apache.mnemonic.mapreduce"/> 
+    </classes>
+  </test> <!-- Test -->
+</suite> <!-- Suite -->
+
+
+<!--
+      <class name="org.apache.mnemonic.service.computingservice.DurableSinglyLinkedListNGPrintTest"/> 
+ -->

http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/12fb0f6c/mnemonic-hadoop/pom.xml
----------------------------------------------------------------------
diff --git a/mnemonic-hadoop/pom.xml b/mnemonic-hadoop/pom.xml
new file mode 100644
index 0000000..acbf685
--- /dev/null
+++ b/mnemonic-hadoop/pom.xml
@@ -0,0 +1,179 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.mnemonic</groupId>
+    <artifactId>mnemonic-parent</artifactId>
+    <version>0.4.0-incubating-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>mnemonic-hadoop</artifactId>
+  <name>mnemonic-hadoop</name>
+  <packaging>pom</packaging>
+  <url>http://mnemonic.incubator.apache.org</url>
+
+  <properties>
+    <hadoop.version>2.7.3</hadoop.version>
+  </properties>
+
+  <modules>
+    <module>mnemonic-hadoop-mapreduce</module>
+  </modules>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.mnemonic</groupId>
+      <artifactId>mnemonic-core</artifactId>
+      <version>${project.version}</version>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.mnemonic</groupId>
+      <artifactId>mnemonic-collections</artifactId>
+      <version>${project.version}</version>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.flowcomputing.commons</groupId>
+      <artifactId>commons-primitives</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+
+    <!-- logging dependencies -->
+    <!-- assume all APIs will be used -->
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>jul-to-slf4j</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>jcl-over-slf4j</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-log4j12</artifactId>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <extensions>
+      <extension>
+        <groupId>kr.motd.maven</groupId>
+        <artifactId>os-maven-plugin</artifactId>
+        <version>1.4.0.Final</version>
+      </extension>
+    </extensions>
+    <plugins>
+    </plugins>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-jar-plugin</artifactId>
+          <version>2.6</version>
+          <configuration>
+            <forceCreation>true</forceCreation>
+            <archive>
+              <addMavenDescriptor>false</addMavenDescriptor>
+            </archive>
+          </configuration>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-shade-plugin</artifactId>
+          <version>2.4.3</version>
+          <configuration>
+            <minimizeJar>true</minimizeJar>
+            <filters>
+              <filter>
+                <artifact>*:*</artifact>
+                <includes>
+                </includes>
+                <excludes>
+                  <exclude>META-INFO/**/**</exclude>
+                  <exclude>META-INFO/services/**</exclude>
+                  <exclude>META-INFO/maven/**</exclude>
+                </excludes>
+              </filter>
+            </filters>
+            <outputDirectory>${service.basedir}/service-dist</outputDirectory>
+            <shadedArtifactAttached>true</shadedArtifactAttached>
+            <shadedClassifierName>${os.detected.classifier}</shadedClassifierName>
+          </configuration>
+          <executions>
+            <execution>
+              <phase>package</phase>
+              <goals>
+                <goal>shade</goal>
+              </goals>
+            </execution>
+          </executions>
+        </plugin>
+        <plugin>
+          <groupId>org.bsc.maven</groupId>
+          <artifactId>maven-processor-plugin</artifactId>
+          <executions>
+            <execution>
+              <id>process-test</id>
+              <goals>
+                <goal>process-test</goal>
+              </goals>
+              <phase>generate-test-sources</phase>
+              <configuration>
+                <compilerArguments>-XDenableSunApiLintControl</compilerArguments>
+                <processors>
+                  <processor>${project.groupId}.DurableEntityProcessor</processor>
+                </processors>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+  </build>
+
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/12fb0f6c/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index c7c51ab..c2152c1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -93,6 +93,7 @@
     <module>mnemonic-memory-services</module>
     <module>mnemonic-computing-services</module>
     <module>mnemonic-benches</module>
+    <module>mnemonic-hadoop</module>
   </modules>
 
   <properties>

