hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From st...@apache.org
Subject svn commit: r557098 - in /lucene/hadoop/trunk/src/contrib/hbase: ./ src/java/org/apache/hadoop/hbase/ src/test/org/apache/hadoop/hbase/
Date Wed, 18 Jul 2007 00:38:59 GMT
Author: stack
Date: Tue Jul 17 17:38:58 2007
New Revision: 557098

URL: http://svn.apache.org/viewvc?view=rev&rev=557098
Log:
HADOOP-1616 [hbase] Sporadic TestTable failures

Modified:
    lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHLog.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHRegion.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestMergeTable.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner2.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestTableMapReduce.java

Modified: lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt?view=diff&rev=557098&r1=557097&r2=557098
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt Tue Jul 17 17:38:58 2007
@@ -64,4 +64,5 @@
  40. HADOOP-1607 [shell] Clear screen command (Edward Yoon via Stack)
  41. HADOOP-1614 [hbase] HClient does not protect itself from simultaneous updates
  42. HADOOP-1468 Add HBase batch update to reduce RPC overhead
+ 43. HADOOP-1616 Sporadic TestTable failures
 

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java?view=diff&rev=557098&r1=557097&r2=557098
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java Tue Jul 17 17:38:58 2007
@@ -37,9 +37,9 @@
  *
  * <p>A single HLog is used by several HRegions simultaneously.
  * 
- * <p>Each HRegion is identified by a unique long int. HRegions do not need to
- * declare themselves before using the HLog; they simply include their
- * HRegion-id in the {@link #append(Text, Text, Text, TreeMap, long)} or 
+ * <p>Each HRegion is identified by a unique long <code>int</code>. HRegions do
+ * not need to declare themselves before using the HLog; they simply include
+ * their HRegion-id in the {@link #append(Text, Text, Text, TreeMap, long)} or 
  * {@link #completeCacheFlush(Text, Text, long)} calls.
  *
  * <p>An HLog consists of multiple on-disk files, which have a chronological
@@ -95,16 +95,14 @@
    * @throws IOException
    */
   static void splitLog(Path rootDir, Path srcDir, FileSystem fs,
-      Configuration conf) throws IOException {
-    
+    Configuration conf) throws IOException {
+    Path logfiles[] = fs.listPaths(srcDir);
     if(LOG.isDebugEnabled()) {
-      LOG.debug("splitting log files");
+      LOG.debug("splitting " + logfiles.length + " log files in " +
+        srcDir.toString());
     }
-    
-    Path logfiles[] = fs.listPaths(srcDir);
     TreeMap<Text, SequenceFile.Writer> logWriters =
       new TreeMap<Text, SequenceFile.Writer>();
-    
     try {
       for(int i = 0; i < logfiles.length; i++) {
         SequenceFile.Reader in =
@@ -115,40 +113,35 @@
           while(in.next(key, val)) {
             Text regionName = key.getRegionName();
             SequenceFile.Writer w = logWriters.get(regionName);
-            if(w == null) {
+            if (w == null) {
               Path logfile = new Path(HStoreFile.getHRegionDir(rootDir,
                   regionName), HREGION_OLDLOGFILE_NAME);
-              
               w = SequenceFile.createWriter(fs, conf, logfile, HLogKey.class,
                   HLogEdit.class);
               logWriters.put(regionName, w);
             }
             w.append(key, val);
           }
-          
         } finally {
           in.close();
         }
       }
-      
     } finally {
-      for(SequenceFile.Writer w: logWriters.values()) {
+      for (SequenceFile.Writer w: logWriters.values()) {
         w.close();
       }
     }
     
     if(fs.exists(srcDir)) {
-      
       if(! fs.delete(srcDir)) {
         LOG.error("Cannot delete: " + srcDir);
-        
         if(! FileUtil.fullyDelete(new File(srcDir.toString()))) {
           throw new IOException("Cannot delete: " + srcDir);
         }
       }
     }
     if(LOG.isDebugEnabled()) {
-      LOG.debug("log file splitting completed");
+      LOG.debug("log file splitting completed for " + srcDir.toString());
     }
   }
 
@@ -213,25 +206,25 @@
           }
         }
         
-        if(LOG.isDebugEnabled()) {
-          LOG.debug("closing current log writer and getting a new one");
-        }
+
 
         // Close the current writer (if any), and grab a new one.
-        
         if(writer != null) {
           writer.close();
-          
-          if(filenum > 0) {
-            outputfiles.put(logSeqNum - 1, computeFilename(filenum - 1));
+          Path p = computeFilename(filenum - 1);
+          if(LOG.isDebugEnabled()) {
+            LOG.debug("Closing current log writer " + p.toString() +
+              " to get a new one");
+          }
+          if (filenum > 0) {
+            outputfiles.put(logSeqNum - 1, p);
           }
         }
-        
         Path newPath = computeFilename(filenum++);
-        this.writer = SequenceFile.createWriter(fs, conf, newPath, HLogKey.class, HLogEdit.class);
-
+        this.writer = SequenceFile.createWriter(fs, conf, newPath,
+          HLogKey.class, HLogEdit.class);
         if(LOG.isDebugEnabled()) {
-          LOG.debug("new log writer created");
+          LOG.debug("new log writer created at " + newPath);
         }
         
         // Can we delete any of the old log files?
@@ -239,8 +232,8 @@
         // over all the regions.
 
         long oldestOutstandingSeqNum = Long.MAX_VALUE;
-        for(Iterator<Long> it = regionToLastFlush.values().iterator(); it.hasNext(); ) {
-          long curSeqNum = it.next().longValue();
+        for(Long l: regionToLastFlush.values()) {
+          long curSeqNum = l.longValue();
           
           if(curSeqNum < oldestOutstandingSeqNum) {
             oldestOutstandingSeqNum = curSeqNum;
@@ -249,14 +242,8 @@
 
         // Next, remove all files with a final ID that's older
         // than the oldest pending region-operation.
-
-        if(LOG.isDebugEnabled()) {
-          LOG.debug("removing old log files");
-        }
-        
-        for(Iterator<Long> it = outputfiles.keySet().iterator(); it.hasNext(); ) {
+        for(Iterator<Long> it = outputfiles.keySet().iterator(); it.hasNext();) {
           long maxSeqNum = it.next().longValue();
-          
           if(maxSeqNum < oldestOutstandingSeqNum) {
             Path p = outputfiles.get(maxSeqNum);
             it.remove();
@@ -269,16 +256,13 @@
       }
 
       // Actually delete them, if any!
-
       for(Iterator<Path> it = toDeleteList.iterator(); it.hasNext(); ) {
         Path p = it.next();
+        if(LOG.isDebugEnabled()) {
+          LOG.debug("removing old log file " + p.toString());
+        }
         fs.delete(p);
       }
-
-      if(LOG.isDebugEnabled()) {
-        LOG.debug("old log files deleted");
-      }
-      
       this.numEntries = 0;
     }
   }
@@ -307,13 +291,10 @@
    */
   synchronized void close() throws IOException {
     if(LOG.isDebugEnabled()) {
-      LOG.debug("closing log writer");
+      LOG.debug("closing log writer in " + this.dir.toString());
     }
     this.writer.close();
     this.closed = true;
-    if(LOG.isDebugEnabled()) {
-      LOG.debug("log writer closed");
-    }
   }
 
   /**

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java?view=diff&rev=557098&r1=557097&r2=557098
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java Tue Jul 17 17:38:58 2007
@@ -1841,8 +1841,9 @@
       connection.commit(metaRegionName, clientId, lockid,
         System.currentTimeMillis());
 
-      // 4. Close the new region to flush it to disk
+      // 4. Close the new region to flush it to disk.  Close its log file too.
       r.close();
+      r.getLog().closeAndDelete();
 
       // 5. Get it assigned to a server
       unassignedRegions.put(regionName, info);

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java?view=diff&rev=557098&r1=557097&r2=557098
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java Tue Jul 17 17:38:58 2007
@@ -436,7 +436,6 @@
       }
       try {
         return allHStoreFiles;
-
       } finally {
         synchronized (writestate) {
           writestate.closed = true;
@@ -1536,6 +1535,9 @@
   
   /**
    * Convenience method creating new HRegions.
+   * Note, this method creates an {@link HLog} for the created region. It
+   * needs to be closed explicitly.  Use {@link HRegion#getLog()} to get
+   * access.
    * @param regionId ID to use
    * @param tableDesc Descriptor
    * @param rootDir Root directory of HBase instance
@@ -1550,11 +1552,13 @@
     return createHRegion(new HRegionInfo(regionId, tableDesc, null, null),
       rootDir, conf, null);
   }
-  
+
   /**
    * Convenience method creating new HRegions. Used by createTable and by the
-   * bootstrap code in the HMaster constructor
-   * 
+   * bootstrap code in the HMaster constructor.
+   * Note, this method creates an {@link HLog} for the created region. It
+   * needs to be closed explicitly.  Use {@link HRegion#getLog()} to get
+   * access.
    * @param info Info for region to create.
    * @param rootDir Root directory for HBase instance
    * @param conf

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java?view=diff&rev=557098&r1=557097&r2=557098
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java Tue Jul 17 17:38:58 2007
@@ -95,8 +95,8 @@
       // Now create the root and meta regions and insert the data regions
       // created above into the meta
       
-      HRegion root = createNewHRegion(fs, dir, conf, HGlobals.rootTableDesc, 0L, null, null);
-      HRegion meta = createNewHRegion(fs, dir, conf, HGlobals.metaTableDesc, 1L, null, null);
+      HRegion root = createNewHRegion(dir, conf, HGlobals.rootTableDesc, 0L, null, null);
+      HRegion meta = createNewHRegion(dir, conf, HGlobals.metaTableDesc, 1L, null, null);
     
       HRegion.addRegionToMETA(root, meta);
       
@@ -129,7 +129,8 @@
 
   private HRegion createAregion(Text startKey, Text endKey, int firstRow, int nrows)
       throws IOException {
-    HRegion region = createNewHRegion(fs, dir, conf, desc, rand.nextLong(), startKey, endKey);
+    
+    HRegion region = createNewHRegion(dir, conf, desc, rand.nextLong(), startKey, endKey);
     
     System.out.println("created region " + region.getRegionName());
 

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/HBaseTestCase.java?view=diff&rev=557098&r1=557097&r2=557098
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/HBaseTestCase.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/HBaseTestCase.java Tue Jul 17 17:38:58 2007
@@ -52,17 +52,15 @@
     return new Path(StaticTestEnvironment.TEST_DIRECTORY_KEY, testName);
   }
 
-  protected HRegion createNewHRegion(FileSystem fs, Path dir,
-      Configuration conf, HTableDescriptor desc, long regionId, Text startKey,
-      Text endKey) throws IOException {
-    
+  protected HRegion createNewHRegion(Path dir, Configuration c,
+    HTableDescriptor desc, long regionId, Text startKey, Text endKey)
+  throws IOException {
     HRegionInfo info = new HRegionInfo(regionId, desc, startKey, endKey);
     Path regionDir = HStoreFile.getHRegionDir(dir, info.regionName);
+    FileSystem fs = dir.getFileSystem(c);
     fs.mkdirs(regionDir);
-
     return new HRegion(dir,
       new HLog(fs, new Path(regionDir, HConstants.HREGION_LOGDIR_NAME), conf),
       fs, conf, info, null);
   }
-  
-}
+}
\ No newline at end of file

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java?view=diff&rev=557098&r1=557097&r2=557098
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java Tue Jul 17 17:38:58 2007
@@ -266,7 +266,13 @@
       }
     }
     LOG.info("HBase Cluster shutdown complete");
-
+    
+    // Close the file system.  Will complain if files open so helps w/ leaks.
+    try {
+      this.cluster.getFileSystem().close();
+    } catch (IOException e) {
+      LOG.error("Closing down dfs", e);
+    }
     if(cluster != null) {
       LOG.info("Shutting down Mini DFS cluster");
       cluster.shutdown();

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHLog.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHLog.java?view=diff&rev=557098&r1=557097&r2=557098
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHLog.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHLog.java Tue Jul 17 17:38:58 2007
@@ -88,7 +88,7 @@
         }
       } finally {
         if (log != null) {
-          log.close();
+          log.closeAndDelete();
         }
         if (reader != null) {
           reader.close();

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHRegion.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHRegion.java?view=diff&rev=557098&r1=557097&r2=557098
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHRegion.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHRegion.java Tue Jul 17 17:38:58 2007
@@ -610,7 +610,7 @@
 
     System.out.println("Merge regions elapsed time: "
         + ((System.currentTimeMillis() - startTime) / 1000.0));
-
+    
     fs.delete(oldRegionPath);
     fs.delete(oldRegion1);
     fs.delete(oldRegion2);
@@ -812,7 +812,11 @@
   private void cleanup() {
 
     // Shut down the mini cluster
-
+    try {
+      log.closeAndDelete();
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
     cluster.shutdown();
     cluster = null;
 

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestMergeTable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestMergeTable.java?view=diff&rev=557098&r1=557097&r2=557098
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestMergeTable.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestMergeTable.java Tue Jul 17 17:38:58 2007
@@ -19,21 +19,16 @@
  */
 package org.apache.hadoop.hbase;
 
+import java.io.IOException;
+
 public class TestMergeTable extends AbstractMergeTestBase {
 
-  public void testMergeTable() {
+  public void testMergeTable() throws IOException {
+    MiniHBaseCluster hCluster = new MiniHBaseCluster(conf, 1, dfsCluster);
     try {
-      MiniHBaseCluster hCluster = new MiniHBaseCluster(conf, 1, dfsCluster);
-      try {
-        HMerge.merge(conf, fs, desc.getName());
-      
-      } finally {
-        hCluster.shutdown();
-      }
-      
-    } catch(Throwable t) {
-      t.printStackTrace();
-      fail();
+      HMerge.merge(conf, fs, desc.getName());
+    } finally {
+      hCluster.shutdown();
     }
   }
-}
+}
\ No newline at end of file

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner.java?view=diff&rev=557098&r1=557097&r2=557098
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner.java Tue Jul 17 17:38:58 2007
@@ -260,7 +260,6 @@
       
       region.close();
       log.closeAndDelete();
-
     } catch(IOException e) {
       e.printStackTrace();
       throw e;

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner2.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner2.java?view=diff&rev=557098&r1=557097&r2=557098
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner2.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner2.java Tue Jul 17 17:38:58 2007
@@ -67,12 +67,19 @@
     newRegions.add(HRegion.createHRegion(
       new HRegionInfo(3L, desc, new Text("midway"), null),
         homedir, this.conf, null));
-    for (HRegion r: newRegions) {
-      HRegion.addRegionToMETA(client, HConstants.META_TABLE_NAME, r,
-        this.cluster.getHMasterAddress(), -1L);
+    try {
+      for (HRegion r : newRegions) {
+        HRegion.addRegionToMETA(client, HConstants.META_TABLE_NAME, r,
+            this.cluster.getHMasterAddress(), -1L);
+      }
+      regions = scan(client, HConstants.META_TABLE_NAME);
+      assertEquals("Should be two regions only", 2, regions.size());
+    } finally {
+      for (HRegion r : newRegions) {
+        r.close();
+        r.getLog().closeAndDelete();
+      }
     }
-    regions = scan(client, HConstants.META_TABLE_NAME);
-    assertEquals("Should be two regions only", 2, regions.size());
   }
   
   private List<HRegionInfo> scan(final HClient client, final Text table)

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestTableMapReduce.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestTableMapReduce.java?view=diff&rev=557098&r1=557097&r2=557098
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestTableMapReduce.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestTableMapReduce.java Tue Jul 17 17:38:58 2007
@@ -89,11 +89,11 @@
 
       // create the root and meta regions and insert the data region into the meta
 
-      HRegion root = createNewHRegion(fs, dir, conf, HGlobals.rootTableDesc, 0L, null, null);
-      HRegion meta = createNewHRegion(fs, dir, conf, HGlobals.metaTableDesc, 1L, null, null);
+      HRegion root = createNewHRegion(dir, conf, HGlobals.rootTableDesc, 0L, null, null);
+      HRegion meta = createNewHRegion(dir, conf, HGlobals.metaTableDesc, 1L, null, null);
       HRegion.addRegionToMETA(root, meta);
 
-      HRegion region = createNewHRegion(fs, dir, conf, desc, rand.nextLong(), null, null);
+      HRegion region = createNewHRegion(dir, conf, desc, rand.nextLong(), null, null);
       HRegion.addRegionToMETA(meta, region);
 
       // insert some data into the test table



Mime
View raw message