mnemonic-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ga...@apache.org
Subject incubator-mnemonic git commit: MNEMONIC-195: Specify the base name of output partitions
Date Tue, 21 Feb 2017 17:51:00 GMT
Repository: incubator-mnemonic
Updated Branches:
  refs/heads/master 66fe6c032 -> d9600e4dd


MNEMONIC-195: Specify the base name of output partitions


Project: http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/commit/d9600e4d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/tree/d9600e4d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/diff/d9600e4d

Branch: refs/heads/master
Commit: d9600e4ddd4477359d743bf427513471806ca5a3
Parents: 66fe6c0
Author: Wang, Gang(Gary) <gang1.wang@intel.com>
Authored: Mon Feb 20 09:39:03 2017 -0800
Committer: Wang, Gang(Gary) <gang1.wang@intel.com>
Committed: Tue Feb 21 09:44:50 2017 -0800

----------------------------------------------------------------------
 README.md                                       |   2 +-
 build-tools/runall.sh                           |   4 +-
 .../apache/mnemonic/hadoop/MneConfigHelper.java |   4 +
 .../mnemonic/mapreduce/MneMapreduceIOTest.java  | 159 ------------------
 .../mapreduce/MneMapreducePersonDataTest.java   | 161 +++++++++++++++++++
 5 files changed, 168 insertions(+), 162 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/d9600e4d/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
index faba6da..c1ca034 100644
--- a/README.md
+++ b/README.md
@@ -290,7 +290,7 @@ To run several test cases:
   $ mvn -Dtest=DurableSinglyLinkedListNGSortTest test -pl mnemonic-computing-services/mnemonic-utilities-service
-DskipTests=false
   
   $ # a testcase for module "mnemonic-hadoop/mnemonic-hadoop-mapreduce" that requires 'pmalloc' memory service to pass
-  $ mvn -Dtest=MneMapreduceIOTest test -pl mnemonic-hadoop/mnemonic-hadoop-mapreduce -DskipTests=false
+  $ mvn -Dtest=MneMapreducePersonDataTest test -pl mnemonic-hadoop/mnemonic-hadoop-mapreduce
-DskipTests=false
 ```
 
 

http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/d9600e4d/build-tools/runall.sh
----------------------------------------------------------------------
diff --git a/build-tools/runall.sh b/build-tools/runall.sh
index 690e22e..83931a3 100755
--- a/build-tools/runall.sh
+++ b/build-tools/runall.sh
@@ -119,8 +119,8 @@ exit 1
 fi
 echo [SUCCESS] Test case DurableSinglyLinkedListNGSortTest for \"mnemonic-computing-services/mnemonic-utilities-service\"
is completed!
 
-echo [INFO] Running MneMapreduceIOTest for \"mnemonic-hadoop/mnemonic-hadoop-mapreduce\"...
-mvn -Dtest=MneMapreduceIOTest test -pl mnemonic-hadoop/mnemonic-hadoop-mapreduce -DskipTests=false
> testlog/MneMapreduceIOTest.log
+echo [INFO] Running MneMapreducePersonDataTest for \"mnemonic-hadoop/mnemonic-hadoop-mapreduce\"...
+mvn -Dtest=MneMapreducePersonDataTest test -pl mnemonic-hadoop/mnemonic-hadoop-mapreduce
-DskipTests=false > testlog/MneMapreducePersonDataTest.log
 if [ $? -gt 0 ]
 then
 echo [ERROR] This test case requires \"pmalloc\" memory service to pass, please check if \"pmalloc\" has been configured correctly! If \"pmalloc\" is installed, please refer to testlog/MneMapreducePersonDataTest.log for detailed information.

http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/d9600e4d/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/MneConfigHelper.java
----------------------------------------------------------------------
diff --git a/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/MneConfigHelper.java
b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/MneConfigHelper.java
index a91f00f..1a3eec3 100644
--- a/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/MneConfigHelper.java
+++ b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/main/java/org/apache/mnemonic/hadoop/MneConfigHelper.java
@@ -56,6 +56,10 @@ public class MneConfigHelper {
     return conf.get(getConfigName(prefix, BASE_OUTPUT_NAME), DEFAULT_NAME_PART);
   }
 
+  public static void setBaseOutputName(Configuration conf, String prefix, String basename)
{
+    conf.set(getConfigName(prefix, BASE_OUTPUT_NAME), basename);
+  }
+
   public static void setDurableTypes(Configuration conf, String prefix, DurableType[] dtypes)
{
     String val = StringUtils.join(dtypes, ",");
     conf.set(getConfigName(prefix, DURABLE_TYPES), val);

http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/d9600e4d/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/MneMapreduceIOTest.java
----------------------------------------------------------------------
diff --git a/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/MneMapreduceIOTest.java
b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/MneMapreduceIOTest.java
deleted file mode 100644
index df18367..0000000
--- a/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/MneMapreduceIOTest.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.mnemonic.mapreduce;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Random;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.TaskType;
-import org.apache.hadoop.mapreduce.lib.input.FileSplit;
-import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
-import org.apache.mnemonic.DurableType;
-import org.apache.mnemonic.Utils;
-import org.apache.mnemonic.hadoop.MneConfigHelper;
-import org.apache.mnemonic.hadoop.MneDurableInputValue;
-import org.apache.mnemonic.hadoop.MneDurableOutputSession;
-import org.apache.mnemonic.hadoop.MneDurableOutputValue;
-import org.apache.mnemonic.hadoop.mapreduce.MneInputFormat;
-import org.apache.mnemonic.hadoop.mapreduce.MneOutputFormat;
-import org.testng.AssertJUnit;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-public class MneMapreduceIOTest {
-
-  private static final String SERVICE_NAME = "pmalloc";
-  private static final long SLOT_KEY_ID = 5L;
-  private Path m_workdir;
-  private JobConf m_conf;
-  private FileSystem m_fs;
-  private Random m_rand;
-  private TaskAttemptID m_taid;
-  private TaskAttemptContext m_tacontext;
-  private long m_reccnt = 500000L;
-  private long m_sumage = 0L;
-
-  @BeforeClass
-  public void setUp() throws IOException {
-    m_workdir = new Path(
-        System.getProperty("test.tmp.dir", "target" + File.separator + "test" + File.separator
+ "tmp"));
-    m_conf = new JobConf();
-    m_rand = Utils.createRandom();
-
-    try {
-      m_fs = FileSystem.getLocal(m_conf).getRaw();
-      m_fs.delete(m_workdir, true);
-      m_fs.mkdirs(m_workdir);
-    } catch (IOException e) {
-      throw new IllegalStateException("bad fs init", e);
-    }
-
-    m_taid = new TaskAttemptID("jt", 0, TaskType.MAP, 0, 0);
-    m_tacontext = new TaskAttemptContextImpl(m_conf, m_taid);
-
-    m_conf.set("mapreduce.output.fileoutputformat.outputdir", m_workdir.toString());
-
-    MneConfigHelper.setMemServiceName(m_conf, MneConfigHelper.DEFAULT_INPUT_CONFIG_PREFIX,
SERVICE_NAME);
-    MneConfigHelper.setSlotKeyId(m_conf, MneConfigHelper.DEFAULT_INPUT_CONFIG_PREFIX, SLOT_KEY_ID);
-    MneConfigHelper.setDurableTypes(m_conf,
-        MneConfigHelper.DEFAULT_INPUT_CONFIG_PREFIX, new DurableType[] {DurableType.DURABLE});
-    MneConfigHelper.setEntityFactoryProxies(m_conf,
-        MneConfigHelper.DEFAULT_INPUT_CONFIG_PREFIX, new Class<?>[] {PersonListEFProxy.class});
-    MneConfigHelper.setMemServiceName(m_conf, MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX,
SERVICE_NAME);
-    MneConfigHelper.setSlotKeyId(m_conf, MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX, SLOT_KEY_ID);
-    MneConfigHelper.setMemPoolSize(m_conf,
-        MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX, 1024L * 1024 * 1024 * 4);
-    MneConfigHelper.setDurableTypes(m_conf,
-        MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX, new DurableType[] {DurableType.DURABLE});
-    MneConfigHelper.setEntityFactoryProxies(m_conf,
-        MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX, new Class<?>[] {PersonListEFProxy.class});
-  }
-
-  @AfterClass
-  public void tearDown() {
-
-  }
-
-  @Test(enabled = true)
-  public void testWritePersonData() throws Exception {
-    NullWritable nada = NullWritable.get();
-    MneDurableOutputSession<Person<Long>> sess = new MneDurableOutputSession<Person<Long>>(m_tacontext);
-    sess.readConfig(MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
-    sess.initNextPool();
-    MneDurableOutputValue<Person<Long>> mdvalue =
-        new MneDurableOutputValue<Person<Long>>(sess);
-    OutputFormat<NullWritable, MneDurableOutputValue<Person<Long>>> outputFormat
=
-        new MneOutputFormat<MneDurableOutputValue<Person<Long>>>();
-    RecordWriter<NullWritable, MneDurableOutputValue<Person<Long>>> writer
=
-        outputFormat.getRecordWriter(m_tacontext);
-    Person<Long> person = null;
-    for (int i = 0; i < m_reccnt; ++i) {
-      person = sess.newDurableObjectRecord();
-      person.setAge((short) m_rand.nextInt(50));
-      person.setName(String.format("Name: [%s]", Utils.genRandomString()), true);
-      m_sumage += person.getAge();
-      writer.write(nada, mdvalue.of(person));
-    }
-    writer.close(m_tacontext);
-    sess.close();
-  }
-
-  @Test(enabled = true, dependsOnMethods = { "testWritePersonData" })
-  public void testReadPersonData() throws Exception {
-    long sumage = 0L;
-    long reccnt = 0L;
-    File folder = new File(m_workdir.toString());
-    File[] listfiles = folder.listFiles();
-    for (int idx = 0; idx < listfiles.length; ++idx) {
-      if (listfiles[idx].isFile()
-          && listfiles[idx].getName().endsWith(MneConfigHelper.DEFAULT_FILE_EXTENSION))
{
-        System.out.println(String.format("Verifying : %s", listfiles[idx].getName()));
-        FileSplit split = new FileSplit(
-            new Path(m_workdir, listfiles[idx].getName()), 0, 0L, new String[0]);
-        InputFormat<NullWritable, MneDurableInputValue<Person<Long>>> inputFormat
=
-            new MneInputFormat<MneDurableInputValue<Person<Long>>, Person<Long>>();
-        RecordReader<NullWritable, MneDurableInputValue<Person<Long>>>
reader =
-            inputFormat.createRecordReader(split, m_tacontext);
-        MneDurableInputValue<Person<Long>> personval = null;
-        while (reader.nextKeyValue()) {
-          personval = reader.getCurrentValue();
-          AssertJUnit.assertTrue(personval.getValue().getAge() < 51);
-          sumage += personval.getValue().getAge();
-          ++reccnt;
-        }
-        reader.close();
-      }
-    }
-    AssertJUnit.assertEquals(m_sumage, sumage);
-    AssertJUnit.assertEquals(m_reccnt, reccnt);
-    System.out.println(String.format("The sum of ages is %d", sumage));
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-mnemonic/blob/d9600e4d/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/MneMapreducePersonDataTest.java
----------------------------------------------------------------------
diff --git a/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/MneMapreducePersonDataTest.java
b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/MneMapreducePersonDataTest.java
new file mode 100644
index 0000000..7f3e8e2
--- /dev/null
+++ b/mnemonic-hadoop/mnemonic-hadoop-mapreduce/src/test/java/org/apache/mnemonic/mapreduce/MneMapreducePersonDataTest.java
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.mnemonic.mapreduce;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Random;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.InputFormat;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+import org.apache.mnemonic.DurableType;
+import org.apache.mnemonic.Utils;
+import org.apache.mnemonic.hadoop.MneConfigHelper;
+import org.apache.mnemonic.hadoop.MneDurableInputValue;
+import org.apache.mnemonic.hadoop.MneDurableOutputSession;
+import org.apache.mnemonic.hadoop.MneDurableOutputValue;
+import org.apache.mnemonic.hadoop.mapreduce.MneInputFormat;
+import org.apache.mnemonic.hadoop.mapreduce.MneOutputFormat;
+import org.testng.AssertJUnit;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+public class MneMapreducePersonDataTest {
+
+  private static final String SERVICE_NAME = "pmalloc";
+  private static final long SLOT_KEY_ID = 5L;
+  private Path m_workdir;
+  private JobConf m_conf;
+  private FileSystem m_fs;
+  private Random m_rand;
+  private TaskAttemptID m_taid;
+  private TaskAttemptContext m_tacontext;
+  private long m_reccnt = 500000L;
+  private long m_sumage = 0L;
+
+  @BeforeClass
+  public void setUp() throws IOException {
+    m_workdir = new Path(
+        System.getProperty("test.tmp.dir", "target" + File.separator + "test" + File.separator
+ "tmp"));
+    m_conf = new JobConf();
+    m_rand = Utils.createRandom();
+
+    try {
+      m_fs = FileSystem.getLocal(m_conf).getRaw();
+      m_fs.delete(m_workdir, true);
+      m_fs.mkdirs(m_workdir);
+    } catch (IOException e) {
+      throw new IllegalStateException("bad fs init", e);
+    }
+
+    m_taid = new TaskAttemptID("jt", 0, TaskType.MAP, 0, 0);
+    m_tacontext = new TaskAttemptContextImpl(m_conf, m_taid);
+
+    m_conf.set("mapreduce.output.fileoutputformat.outputdir", m_workdir.toString());
+    MneConfigHelper.setBaseOutputName(m_conf, null, "person-data");
+
+    MneConfigHelper.setMemServiceName(m_conf, MneConfigHelper.DEFAULT_INPUT_CONFIG_PREFIX,
SERVICE_NAME);
+    MneConfigHelper.setSlotKeyId(m_conf, MneConfigHelper.DEFAULT_INPUT_CONFIG_PREFIX, SLOT_KEY_ID);
+    MneConfigHelper.setDurableTypes(m_conf,
+        MneConfigHelper.DEFAULT_INPUT_CONFIG_PREFIX, new DurableType[] {DurableType.DURABLE});
+    MneConfigHelper.setEntityFactoryProxies(m_conf,
+        MneConfigHelper.DEFAULT_INPUT_CONFIG_PREFIX, new Class<?>[] {PersonListEFProxy.class});
+    MneConfigHelper.setMemServiceName(m_conf, MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX,
SERVICE_NAME);
+    MneConfigHelper.setSlotKeyId(m_conf, MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX, SLOT_KEY_ID);
+    MneConfigHelper.setMemPoolSize(m_conf,
+        MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX, 1024L * 1024 * 1024 * 4);
+    MneConfigHelper.setDurableTypes(m_conf,
+        MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX, new DurableType[] {DurableType.DURABLE});
+    MneConfigHelper.setEntityFactoryProxies(m_conf,
+        MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX, new Class<?>[] {PersonListEFProxy.class});
+  }
+
+  @AfterClass
+  public void tearDown() {
+
+  }
+
+  @Test(enabled = true)
+  public void testWritePersonData() throws Exception {
+    NullWritable nada = NullWritable.get();
+    MneDurableOutputSession<Person<Long>> sess = new MneDurableOutputSession<Person<Long>>(m_tacontext);
+    sess.readConfig(MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
+    sess.initNextPool();
+    MneDurableOutputValue<Person<Long>> mdvalue =
+        new MneDurableOutputValue<Person<Long>>(sess);
+    OutputFormat<NullWritable, MneDurableOutputValue<Person<Long>>> outputFormat
=
+        new MneOutputFormat<MneDurableOutputValue<Person<Long>>>();
+    RecordWriter<NullWritable, MneDurableOutputValue<Person<Long>>> writer
=
+        outputFormat.getRecordWriter(m_tacontext);
+    Person<Long> person = null;
+    for (int i = 0; i < m_reccnt; ++i) {
+      person = sess.newDurableObjectRecord();
+      person.setAge((short) m_rand.nextInt(50));
+      person.setName(String.format("Name: [%s]", Utils.genRandomString()), true);
+      m_sumage += person.getAge();
+      writer.write(nada, mdvalue.of(person));
+    }
+    writer.close(m_tacontext);
+    sess.close();
+  }
+
+  @Test(enabled = true, dependsOnMethods = { "testWritePersonData" })
+  public void testReadPersonData() throws Exception {
+    long sumage = 0L;
+    long reccnt = 0L;
+    File folder = new File(m_workdir.toString());
+    File[] listfiles = folder.listFiles();
+    for (int idx = 0; idx < listfiles.length; ++idx) {
+      if (listfiles[idx].isFile()
+          && listfiles[idx].getName().startsWith(MneConfigHelper.getBaseOutputName(m_conf,
null))
+          && listfiles[idx].getName().endsWith(MneConfigHelper.DEFAULT_FILE_EXTENSION))
{
+        System.out.println(String.format("Verifying : %s", listfiles[idx].getName()));
+        FileSplit split = new FileSplit(
+            new Path(m_workdir, listfiles[idx].getName()), 0, 0L, new String[0]);
+        InputFormat<NullWritable, MneDurableInputValue<Person<Long>>> inputFormat
=
+            new MneInputFormat<MneDurableInputValue<Person<Long>>, Person<Long>>();
+        RecordReader<NullWritable, MneDurableInputValue<Person<Long>>>
reader =
+            inputFormat.createRecordReader(split, m_tacontext);
+        MneDurableInputValue<Person<Long>> personval = null;
+        while (reader.nextKeyValue()) {
+          personval = reader.getCurrentValue();
+          AssertJUnit.assertTrue(personval.getValue().getAge() < 51);
+          sumage += personval.getValue().getAge();
+          ++reccnt;
+        }
+        reader.close();
+      }
+    }
+    AssertJUnit.assertEquals(m_sumage, sumage);
+    AssertJUnit.assertEquals(m_reccnt, reccnt);
+    System.out.println(String.format("The checksum of ages is %d", sumage));
+  }
+}


Mime
View raw message