accumulo-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ktur...@apache.org
Subject svn commit: r1370925 [1/2] - in /accumulo/trunk: core/src/main/java/org/apache/accumulo/core/ core/src/main/java/org/apache/accumulo/core/client/admin/ core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ core/src/main/java/org/apache/accumu...
Date Wed, 08 Aug 2012 20:02:59 GMT
Author: kturner
Date: Wed Aug  8 20:02:58 2012
New Revision: 1370925

URL: http://svn.apache.org/viewvc?rev=1370925&view=rev
Log:
ACCUMULO-456 initial checkin of export/import table

Added:
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/shell/commands/ExportTableCommand.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/shell/commands/ImportTableCommand.java
    accumulo/trunk/docs/examples/README.export
    accumulo/trunk/server/src/main/java/org/apache/accumulo/server/master/tableOps/ExportTable.java
    accumulo/trunk/server/src/main/java/org/apache/accumulo/server/master/tableOps/ImportTable.java
    accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/randomwalk/shard/ExportIndex.java
Modified:
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/Constants.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperations.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperationsImpl.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ClientService.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/TableOperation.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mock/MockTableOperations.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/conf/Property.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/data/thrift/MultiScanResult.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/data/thrift/UpdateErrors.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterClientService.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterMonitorInfo.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/TableOperation.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/TabletServerStatus.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/tabletserver/thrift/ActiveScan.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/tabletserver/thrift/TIteratorSetting.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/tabletserver/thrift/TabletClientService.java
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/shell/Shell.java
    accumulo/trunk/core/src/main/thrift/client.thrift
    accumulo/trunk/core/src/main/thrift/master.thrift
    accumulo/trunk/core/src/test/java/org/apache/accumulo/core/client/admin/TableOperationsHelperTest.java
    accumulo/trunk/docs/src/user_manual/chapters/table_configuration.tex
    accumulo/trunk/server/src/main/java/org/apache/accumulo/server/master/Master.java
    accumulo/trunk/server/src/main/java/org/apache/accumulo/server/master/state/tables/TableManager.java
    accumulo/trunk/test/system/randomwalk/conf/modules/Shard.xml

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/Constants.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/Constants.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/Constants.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/Constants.java Wed Aug  8 20:02:58 2012
@@ -160,6 +160,11 @@ public class Constants {
   public static final String MAPFILE_EXTENSION = "map";
   public static final String GENERATED_TABLET_DIRECTORY_PREFIX = "t-";
   
+  public static final String EXPORT_METADATA_FILE = "metadata.bin";
+  public static final String EXPORT_TABLE_CONFIG_FILE = "table_config.txt";
+  public static final String EXPORT_FILE = "exportMetadata.zip";
+  public static final String EXPORT_INFO_FILE = "accumulo_export_info.txt";
+
   public static String getBaseDir(AccumuloConfiguration conf) {
     return conf.get(Property.INSTANCE_DFS_DIR);
   }
@@ -191,5 +196,4 @@ public class Constants {
   public static String getWalDirectory(AccumuloConfiguration conf) {
     return getBaseDir(conf) + "/wal";
   }
-  
 }

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperations.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperations.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperations.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperations.java Wed Aug  8 20:02:58 2012
@@ -98,6 +98,36 @@ public interface TableOperations {
   public void create(String tableName, boolean versioningIter, TimeType timeType) throws AccumuloException, AccumuloSecurityException, TableExistsException;
   
   /**
+   * Imports a table exported via exportTable and copied via hadoop distcp.
+   * 
+   * @param tableName
+   *          Name of a table to create and import into.
+   * @param importDir
+   *          Directory that contains the files copied by distcp from exportTable
+   * @throws TableExistsException
+   * @throws AccumuloException
+   * @throws AccumuloSecurityException
+   */
+  public void importTable(String tableName, String importDir) throws TableExistsException, AccumuloException, AccumuloSecurityException;
+  
+  /**
+   * Exports a table. The table's data is not exported, just table metadata and a list of files to distcp. The table being exported must be offline and stay
+   * offline for the duration of distcp. To avoid losing access to a table it can be cloned and the clone taken offline for export.
+   * 
+   * <p>
+   * See docs/examples/README.export
+   * 
+   * @param tableName
+   *          Name of the table to export.
+   * @param exportDir
+   *          An empty directory in HDFS where files containing table metadata and list of files to distcp will be placed.
+   * @throws TableNotFoundException
+   * @throws AccumuloException
+   * @throws AccumuloSecurityException
+   */
+  public void exportTable(String tableName, String exportDir) throws TableNotFoundException, AccumuloException, AccumuloSecurityException;
+
+  /**
    * @param tableName
    *          the name of the table
    * @param partitionKeys

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperationsImpl.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperationsImpl.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperationsImpl.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperationsImpl.java Wed Aug  8 20:02:58 2012
@@ -16,7 +16,9 @@
  */
 package org.apache.accumulo.core.client.admin;
 
+import java.io.BufferedReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -37,6 +39,8 @@ import java.util.concurrent.ExecutorServ
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
 
 import org.apache.accumulo.cloudtrace.instrument.Tracer;
 import org.apache.accumulo.core.Constants;
@@ -65,6 +69,7 @@ import org.apache.accumulo.core.conf.Pro
 import org.apache.accumulo.core.data.ByteSequence;
 import org.apache.accumulo.core.data.KeyExtent;
 import org.apache.accumulo.core.data.Range;
+import org.apache.accumulo.core.file.FileUtil;
 import org.apache.accumulo.core.iterators.IteratorUtil;
 import org.apache.accumulo.core.master.state.tables.TableState;
 import org.apache.accumulo.core.master.thrift.MasterClientService;
@@ -1003,7 +1008,7 @@ public class TableOperationsImpl extends
   public void importDirectory(String tableName, String dir, String failureDir, boolean setTime) throws IOException, AccumuloSecurityException,
       TableNotFoundException, AccumuloException {
     ArgumentChecker.notNull(tableName, dir, failureDir);
-    FileSystem fs = FileSystem.get(CachedConfiguration.getInstance());
+    FileSystem fs = FileUtil.getFileSystem(CachedConfiguration.getInstance(), instance.getConfiguration());
     Path failPath = fs.makeQualified(new Path(failureDir));
     if (!fs.exists(new Path(dir)))
       throw new AccumuloException("Bulk import directory " + dir + " does not exist!");
@@ -1107,4 +1112,76 @@ public class TableOperationsImpl extends
     Scanner scanner = instance.getConnector(credentials).createScanner(tableName, auths);
     return FindMax.findMax(scanner, startRow, startInclusive, endRow, endInclusive);
   }
+  
+  public static Map<String,String> getExportedProps(FileSystem fs, Path path) throws IOException {
+    HashMap<String,String> props = new HashMap<String,String>();
+    
+    ZipInputStream zis = new ZipInputStream(fs.open(path));
+    try {
+      ZipEntry zipEntry;
+      while ((zipEntry = zis.getNextEntry()) != null) {
+        if (zipEntry.getName().equals(Constants.EXPORT_TABLE_CONFIG_FILE)) {
+          BufferedReader in = new BufferedReader(new InputStreamReader(zis));
+          String line;
+          while ((line = in.readLine()) != null) {
+            String sa[] = line.split("=", 2);
+            props.put(sa[0], sa[1]);
+          }
+          
+          break;
+        }
+      }
+    } finally {
+      zis.close();
+    }
+    return props;
+  }
+
+  @Override
+  public void importTable(String tableName, String importDir) throws TableExistsException, AccumuloException, AccumuloSecurityException {
+    ArgumentChecker.notNull(tableName, importDir);
+    
+    try{
+      FileSystem fs = FileUtil.getFileSystem(CachedConfiguration.getInstance(), instance.getConfiguration());;
+      Map<String,String> props = getExportedProps(fs, new Path(importDir, Constants.EXPORT_FILE));
+      
+      for(String propKey : props.keySet()){
+        if (Property.isClassProperty(propKey) && !props.get(propKey).contains(Constants.CORE_PACKAGE_NAME)) {
+          Logger.getLogger(this.getClass()).info(
+              "Imported table sets '" + propKey + "' to '" + props.get(propKey) + "'.  Ensure this class is on Accumulo classpath.");
+        }
+      }
+      
+    }catch(IOException ioe){
+      Logger.getLogger(this.getClass()).warn("Failed to check if imported table references external java classes : " + ioe.getMessage());
+    }
+    
+    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes()), ByteBuffer.wrap(importDir.getBytes()));
+    
+    Map<String,String> opts = Collections.emptyMap();
+    
+    try {
+      doTableOperation(TableOperation.IMPORT, args, opts);
+    } catch (TableNotFoundException e1) {
+      // should not happen
+      throw new RuntimeException(e1);
+    }
+    
+  }
+  
+  @Override
+  public void exportTable(String tableName, String exportDir) throws TableNotFoundException, AccumuloException, AccumuloSecurityException {
+    ArgumentChecker.notNull(tableName, exportDir);
+    
+    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes()), ByteBuffer.wrap(exportDir.getBytes()));
+    
+    Map<String,String> opts = Collections.emptyMap();
+    
+    try {
+      doTableOperation(TableOperation.EXPORT, args, opts);
+    } catch (TableExistsException e1) {
+      // should not happen
+      throw new RuntimeException(e1);
+    }
+  }
 }

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ClientService.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ClientService.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ClientService.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ClientService.java Wed Aug  8 20:02:58 2012
@@ -6174,8 +6174,6 @@ import org.slf4j.LoggerFactory;
 
     private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
       try {
-        // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
-        __isset_bit_vector = new BitSet(1);
         read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
       } catch (org.apache.thrift.TException te) {
         throw new java.io.IOException(te);
@@ -7981,8 +7979,6 @@ import org.slf4j.LoggerFactory;
 
     private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
       try {
-        // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
-        __isset_bit_vector = new BitSet(1);
         read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
       } catch (org.apache.thrift.TException te) {
         throw new java.io.IOException(te);
@@ -15299,8 +15295,6 @@ import org.slf4j.LoggerFactory;
 
     private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
       try {
-        // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
-        __isset_bit_vector = new BitSet(1);
         read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
       } catch (org.apache.thrift.TException te) {
         throw new java.io.IOException(te);
@@ -16586,8 +16580,6 @@ import org.slf4j.LoggerFactory;
 
     private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
       try {
-        // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
-        __isset_bit_vector = new BitSet(1);
         read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
       } catch (org.apache.thrift.TException te) {
         throw new java.io.IOException(te);
@@ -21863,7 +21855,7 @@ import org.slf4j.LoggerFactory;
                   for (int _i49 = 0; _i49 < _map48.size; ++_i49)
                   {
                     String _key50; // required
-                    String _val51; // optional
+                    String _val51; // required
                     _key50 = iprot.readString();
                     _val51 = iprot.readString();
                     struct.success.put(_key50, _val51);
@@ -21948,7 +21940,7 @@ import org.slf4j.LoggerFactory;
             for (int _i55 = 0; _i55 < _map54.size; ++_i55)
             {
               String _key56; // required
-              String _val57; // optional
+              String _val57; // required
               _key56 = iprot.readString();
               _val57 = iprot.readString();
               struct.success.put(_key56, _val57);
@@ -22709,7 +22701,7 @@ import org.slf4j.LoggerFactory;
                   for (int _i59 = 0; _i59 < _map58.size; ++_i59)
                   {
                     String _key60; // required
-                    String _val61; // optional
+                    String _val61; // required
                     _key60 = iprot.readString();
                     _val61 = iprot.readString();
                     struct.success.put(_key60, _val61);
@@ -22814,7 +22806,7 @@ import org.slf4j.LoggerFactory;
             for (int _i65 = 0; _i65 < _map64.size; ++_i65)
             {
               String _key66; // required
-              String _val67; // optional
+              String _val67; // required
               _key66 = iprot.readString();
               _val67 = iprot.readString();
               struct.success.put(_key66, _val67);
@@ -23647,8 +23639,6 @@ import org.slf4j.LoggerFactory;
 
     private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
       try {
-        // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
-        __isset_bit_vector = new BitSet(1);
         read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
       } catch (org.apache.thrift.TException te) {
         throw new java.io.IOException(te);

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/TableOperation.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/TableOperation.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/TableOperation.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/TableOperation.java Wed Aug  8 20:02:58 2012
@@ -25,7 +25,9 @@ public enum TableOperation implements or
   MERGE(10),
   DELETE_RANGE(11),
   BULK_IMPORT(12),
-  COMPACT(13);
+  COMPACT(13),
+  IMPORT(14),
+  EXPORT(15);
 
   private final int value;
 
@@ -74,6 +76,10 @@ public enum TableOperation implements or
         return BULK_IMPORT;
       case 13:
         return COMPACT;
+      case 14:
+        return IMPORT;
+      case 15:
+        return EXPORT;
       default:
         return null;
     }

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mock/MockTableOperations.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mock/MockTableOperations.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mock/MockTableOperations.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mock/MockTableOperations.java Wed Aug  8 20:02:58 2012
@@ -227,4 +227,14 @@ public class MockTableOperations extends
     
     return FindMax.findMax(new MockScanner(table, auths), startRow, startInclusive, endRow, endInclusive);
   }
+  
+  @Override
+  public void importTable(String tableName, String exportDir) throws TableExistsException, AccumuloException, AccumuloSecurityException {
+    throw new NotImplementedException();
+  }
+  
+  @Override
+  public void exportTable(String tableName, String exportDir) throws TableNotFoundException, AccumuloException, AccumuloSecurityException {
+    throw new NotImplementedException();
+  }
 }

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/conf/Property.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/conf/Property.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/conf/Property.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/conf/Property.java Wed Aug  8 20:02:58 2012
@@ -357,4 +357,15 @@ public enum Property {
         return prop;
     return null;
   }
+  
+  /**
+   * Checks whether a configuration key names a property whose value is expected to be a java class.
+   * 
+   * @param key
+   *          the property key to check
+   * @return true if this is a property whose value is expected to be a java class
+   */
+  public static boolean isClassProperty(String key) {
+    return (key.startsWith(Property.TABLE_CONSTRAINT_PREFIX.getKey()) && key.substring(Property.TABLE_CONSTRAINT_PREFIX.getKey().length()).split("\\.").length == 1)
+        || (key.startsWith(Property.TABLE_ITERATOR_PREFIX.getKey()) && key.substring(Property.TABLE_ITERATOR_PREFIX.getKey().length()).split("\\.").length == 2)
+        || key.equals(Property.TABLE_LOAD_BALANCER.getKey());
+  }
 }

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/data/thrift/MultiScanResult.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/data/thrift/MultiScanResult.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/data/thrift/MultiScanResult.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/data/thrift/MultiScanResult.java Wed Aug  8 20:02:58 2012
@@ -849,7 +849,7 @@ import org.slf4j.LoggerFactory;
                 for (int _i20 = 0; _i20 < _map19.size; ++_i20)
                 {
                   TKeyExtent _key21; // required
-                  List<TRange> _val22; // optional
+                  List<TRange> _val22; // required
                   _key21 = new TKeyExtent();
                   _key21.read(iprot);
                   {
@@ -1114,7 +1114,7 @@ import org.slf4j.LoggerFactory;
           for (int _i41 = 0; _i41 < _map40.size; ++_i41)
           {
             TKeyExtent _key42; // required
-            List<TRange> _val43; // optional
+            List<TRange> _val43; // required
             _key42 = new TKeyExtent();
             _key42.read(iprot);
             {

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/data/thrift/UpdateErrors.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/data/thrift/UpdateErrors.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/data/thrift/UpdateErrors.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/data/thrift/UpdateErrors.java Wed Aug  8 20:02:58 2012
@@ -539,7 +539,7 @@ import org.slf4j.LoggerFactory;
                 for (int _i51 = 0; _i51 < _map50.size; ++_i51)
                 {
                   TKeyExtent _key52; // required
-                  long _val53; // optional
+                  long _val53; // required
                   _key52 = new TKeyExtent();
                   _key52.read(iprot);
                   _val53 = iprot.readI64();
@@ -711,7 +711,7 @@ import org.slf4j.LoggerFactory;
           for (int _i67 = 0; _i67 < _map66.size; ++_i67)
           {
             TKeyExtent _key68; // required
-            long _val69; // optional
+            long _val69; // required
             _key68 = new TKeyExtent();
             _key68.read(iprot);
             _val69 = iprot.readI64();

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterClientService.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterClientService.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterClientService.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterClientService.java Wed Aug  8 20:02:58 2012
@@ -2538,8 +2538,6 @@ import org.slf4j.LoggerFactory;
 
     private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
       try {
-        // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
-        __isset_bit_vector = new BitSet(1);
         read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
       } catch (org.apache.thrift.TException te) {
         throw new java.io.IOException(te);
@@ -14353,8 +14351,6 @@ import org.slf4j.LoggerFactory;
 
     private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
       try {
-        // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
-        __isset_bit_vector = new BitSet(1);
         read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
       } catch (org.apache.thrift.TException te) {
         throw new java.io.IOException(te);
@@ -15337,7 +15333,7 @@ import org.slf4j.LoggerFactory;
                   for (int _i74 = 0; _i74 < _map73.size; ++_i74)
                   {
                     String _key75; // required
-                    String _val76; // optional
+                    String _val76; // required
                     _key75 = iprot.readString();
                     _val76 = iprot.readString();
                     struct.options.put(_key75, _val76);
@@ -15536,7 +15532,7 @@ import org.slf4j.LoggerFactory;
             for (int _i85 = 0; _i85 < _map84.size; ++_i85)
             {
               String _key86; // required
-              String _val87; // optional
+              String _val87; // required
               _key86 = iprot.readString();
               _val87 = iprot.readString();
               struct.options.put(_key86, _val87);

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterMonitorInfo.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterMonitorInfo.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterMonitorInfo.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterMonitorInfo.java Wed Aug  8 20:02:58 2012
@@ -1002,7 +1002,7 @@ import org.slf4j.LoggerFactory;
                 for (int _i19 = 0; _i19 < _map18.size; ++_i19)
                 {
                   String _key20; // required
-                  TableInfo _val21; // optional
+                  TableInfo _val21; // required
                   _key20 = iprot.readString();
                   _val21 = new TableInfo();
                   _val21.read(iprot);
@@ -1042,7 +1042,7 @@ import org.slf4j.LoggerFactory;
                 for (int _i26 = 0; _i26 < _map25.size; ++_i26)
                 {
                   String _key27; // required
-                  byte _val28; // optional
+                  byte _val28; // required
                   _key27 = iprot.readString();
                   _val28 = iprot.readByte();
                   struct.badTServers.put(_key27, _val28);
@@ -1317,7 +1317,7 @@ import org.slf4j.LoggerFactory;
           for (int _i46 = 0; _i46 < _map45.size; ++_i46)
           {
             String _key47; // required
-            TableInfo _val48; // optional
+            TableInfo _val48; // required
             _key47 = iprot.readString();
             _val48 = new TableInfo();
             _val48.read(iprot);
@@ -1347,7 +1347,7 @@ import org.slf4j.LoggerFactory;
           for (int _i53 = 0; _i53 < _map52.size; ++_i53)
           {
             String _key54; // required
-            byte _val55; // optional
+            byte _val55; // required
             _key54 = iprot.readString();
             _val55 = iprot.readByte();
             struct.badTServers.put(_key54, _val55);

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/TableOperation.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/TableOperation.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/TableOperation.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/TableOperation.java Wed Aug  8 20:02:58 2012
@@ -21,7 +21,9 @@ public enum TableOperation implements or
   MERGE(6),
   DELETE_RANGE(7),
   BULK_IMPORT(8),
-  COMPACT(9);
+  COMPACT(9),
+  IMPORT(10),
+  EXPORT(11);
 
   private final int value;
 
@@ -62,6 +64,10 @@ public enum TableOperation implements or
         return BULK_IMPORT;
       case 9:
         return COMPACT;
+      case 10:
+        return IMPORT;
+      case 11:
+        return EXPORT;
       default:
         return null;
     }

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/TabletServerStatus.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/TabletServerStatus.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/TabletServerStatus.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/master/thrift/TabletServerStatus.java Wed Aug  8 20:02:58 2012
@@ -1108,7 +1108,7 @@ import org.slf4j.LoggerFactory;
                 for (int _i1 = 0; _i1 < _map0.size; ++_i1)
                 {
                   String _key2; // required
-                  TableInfo _val3; // optional
+                  TableInfo _val3; // required
                   _key2 = iprot.readString();
                   _val3 = new TableInfo();
                   _val3.read(iprot);
@@ -1392,7 +1392,7 @@ import org.slf4j.LoggerFactory;
           for (int _i12 = 0; _i12 < _map11.size; ++_i12)
           {
             String _key13; // required
-            TableInfo _val14; // optional
+            TableInfo _val14; // required
             _key13 = iprot.readString();
             _val14 = new TableInfo();
             _val14.read(iprot);

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/tabletserver/thrift/ActiveScan.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/tabletserver/thrift/ActiveScan.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/tabletserver/thrift/ActiveScan.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/tabletserver/thrift/ActiveScan.java Wed Aug  8 20:02:58 2012
@@ -1300,7 +1300,7 @@ import org.slf4j.LoggerFactory;
                 for (int _i15 = 0; _i15 < _map14.size; ++_i15)
                 {
                   String _key16; // required
-                  Map<String,String> _val17; // optional
+                  Map<String,String> _val17; // required
                   _key16 = iprot.readString();
                   {
                     org.apache.thrift.protocol.TMap _map18 = iprot.readMapBegin();
@@ -1308,7 +1308,7 @@ import org.slf4j.LoggerFactory;
                     for (int _i19 = 0; _i19 < _map18.size; ++_i19)
                     {
                       String _key20; // required
-                      String _val21; // optional
+                      String _val21; // required
                       _key20 = iprot.readString();
                       _val21 = iprot.readString();
                       _val17.put(_key20, _val21);
@@ -1605,7 +1605,7 @@ import org.slf4j.LoggerFactory;
           for (int _i37 = 0; _i37 < _map36.size; ++_i37)
           {
             String _key38; // required
-            Map<String,String> _val39; // optional
+            Map<String,String> _val39; // required
             _key38 = iprot.readString();
             {
               org.apache.thrift.protocol.TMap _map40 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
@@ -1613,7 +1613,7 @@ import org.slf4j.LoggerFactory;
               for (int _i41 = 0; _i41 < _map40.size; ++_i41)
               {
                 String _key42; // required
-                String _val43; // optional
+                String _val43; // required
                 _key42 = iprot.readString();
                 _val43 = iprot.readString();
                 _val39.put(_key42, _val43);

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/tabletserver/thrift/TIteratorSetting.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/tabletserver/thrift/TIteratorSetting.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/tabletserver/thrift/TIteratorSetting.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/tabletserver/thrift/TIteratorSetting.java Wed Aug  8 20:02:58 2012
@@ -601,7 +601,7 @@ import org.slf4j.LoggerFactory;
                 for (int _i45 = 0; _i45 < _map44.size; ++_i45)
                 {
                   String _key46; // required
-                  String _val47; // optional
+                  String _val47; // required
                   _key46 = iprot.readString();
                   _val47 = iprot.readString();
                   struct.properties.put(_key46, _val47);
@@ -729,7 +729,7 @@ import org.slf4j.LoggerFactory;
           for (int _i51 = 0; _i51 < _map50.size; ++_i51)
           {
             String _key52; // required
-            String _val53; // optional
+            String _val53; // required
             _key52 = iprot.readString();
             _val53 = iprot.readString();
             struct.properties.put(_key52, _val53);

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/tabletserver/thrift/TabletClientService.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/tabletserver/thrift/TabletClientService.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/tabletserver/thrift/TabletClientService.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/tabletserver/thrift/TabletClientService.java Wed Aug  8 20:02:58 2012
@@ -3510,7 +3510,7 @@ import org.slf4j.LoggerFactory;
                   for (int _i77 = 0; _i77 < _map76.size; ++_i77)
                   {
                     String _key78; // required
-                    Map<String,String> _val79; // optional
+                    Map<String,String> _val79; // required
                     _key78 = iprot.readString();
                     {
                       org.apache.thrift.protocol.TMap _map80 = iprot.readMapBegin();
@@ -3518,7 +3518,7 @@ import org.slf4j.LoggerFactory;
                       for (int _i81 = 0; _i81 < _map80.size; ++_i81)
                       {
                         String _key82; // required
-                        String _val83; // optional
+                        String _val83; // required
                         _key82 = iprot.readString();
                         _val83 = iprot.readString();
                         _val79.put(_key82, _val83);
@@ -3851,7 +3851,7 @@ import org.slf4j.LoggerFactory;
             for (int _i104 = 0; _i104 < _map103.size; ++_i104)
             {
               String _key105; // required
-              Map<String,String> _val106; // optional
+              Map<String,String> _val106; // required
               _key105 = iprot.readString();
               {
                 org.apache.thrift.protocol.TMap _map107 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
@@ -3859,7 +3859,7 @@ import org.slf4j.LoggerFactory;
                 for (int _i108 = 0; _i108 < _map107.size; ++_i108)
                 {
                   String _key109; // required
-                  String _val110; // optional
+                  String _val110; // required
                   _key109 = iprot.readString();
                   _val110 = iprot.readString();
                   _val106.put(_key109, _val110);
@@ -7088,7 +7088,7 @@ import org.slf4j.LoggerFactory;
                   for (int _i115 = 0; _i115 < _map114.size; ++_i115)
                   {
                     org.apache.accumulo.core.data.thrift.TKeyExtent _key116; // required
-                    List<org.apache.accumulo.core.data.thrift.TRange> _val117; // optional
+                    List<org.apache.accumulo.core.data.thrift.TRange> _val117; // required
                     _key116 = new org.apache.accumulo.core.data.thrift.TKeyExtent();
                     _key116.read(iprot);
                     {
@@ -7158,7 +7158,7 @@ import org.slf4j.LoggerFactory;
                   for (int _i128 = 0; _i128 < _map127.size; ++_i128)
                   {
                     String _key129; // required
-                    Map<String,String> _val130; // optional
+                    Map<String,String> _val130; // required
                     _key129 = iprot.readString();
                     {
                       org.apache.thrift.protocol.TMap _map131 = iprot.readMapBegin();
@@ -7166,7 +7166,7 @@ import org.slf4j.LoggerFactory;
                       for (int _i132 = 0; _i132 < _map131.size; ++_i132)
                       {
                         String _key133; // required
-                        String _val134; // optional
+                        String _val134; // required
                         _key133 = iprot.readString();
                         _val134 = iprot.readString();
                         _val130.put(_key133, _val134);
@@ -7448,7 +7448,7 @@ import org.slf4j.LoggerFactory;
             for (int _i153 = 0; _i153 < _map152.size; ++_i153)
             {
               org.apache.accumulo.core.data.thrift.TKeyExtent _key154; // required
-              List<org.apache.accumulo.core.data.thrift.TRange> _val155; // optional
+              List<org.apache.accumulo.core.data.thrift.TRange> _val155; // required
               _key154 = new org.apache.accumulo.core.data.thrift.TKeyExtent();
               _key154.read(iprot);
               {
@@ -7502,7 +7502,7 @@ import org.slf4j.LoggerFactory;
             for (int _i166 = 0; _i166 < _map165.size; ++_i166)
             {
               String _key167; // required
-              Map<String,String> _val168; // optional
+              Map<String,String> _val168; // required
               _key167 = iprot.readString();
               {
                 org.apache.thrift.protocol.TMap _map169 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
@@ -7510,7 +7510,7 @@ import org.slf4j.LoggerFactory;
                 for (int _i170 = 0; _i170 < _map169.size; ++_i170)
                 {
                   String _key171; // required
-                  String _val172; // optional
+                  String _val172; // required
                   _key171 = iprot.readString();
                   _val172 = iprot.readString();
                   _val168.put(_key171, _val172);
@@ -10514,8 +10514,6 @@ import org.slf4j.LoggerFactory;
 
     private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
       try {
-        // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
-        __isset_bit_vector = new BitSet(1);
         read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
       } catch (org.apache.thrift.TException te) {
         throw new java.io.IOException(te);
@@ -14107,7 +14105,7 @@ import org.slf4j.LoggerFactory;
                   for (int _i185 = 0; _i185 < _map184.size; ++_i185)
                   {
                     org.apache.accumulo.core.data.thrift.TKeyExtent _key186; // required
-                    Map<String,org.apache.accumulo.core.data.thrift.MapFileInfo> _val187; // optional
+                    Map<String,org.apache.accumulo.core.data.thrift.MapFileInfo> _val187; // required
                     _key186 = new org.apache.accumulo.core.data.thrift.TKeyExtent();
                     _key186.read(iprot);
                     {
@@ -14116,7 +14114,7 @@ import org.slf4j.LoggerFactory;
                       for (int _i189 = 0; _i189 < _map188.size; ++_i189)
                       {
                         String _key190; // required
-                        org.apache.accumulo.core.data.thrift.MapFileInfo _val191; // optional
+                        org.apache.accumulo.core.data.thrift.MapFileInfo _val191; // required
                         _key190 = iprot.readString();
                         _val191 = new org.apache.accumulo.core.data.thrift.MapFileInfo();
                         _val191.read(iprot);
@@ -14283,7 +14281,7 @@ import org.slf4j.LoggerFactory;
             for (int _i197 = 0; _i197 < _map196.size; ++_i197)
             {
               org.apache.accumulo.core.data.thrift.TKeyExtent _key198; // required
-              Map<String,org.apache.accumulo.core.data.thrift.MapFileInfo> _val199; // optional
+              Map<String,org.apache.accumulo.core.data.thrift.MapFileInfo> _val199; // required
               _key198 = new org.apache.accumulo.core.data.thrift.TKeyExtent();
               _key198.read(iprot);
               {
@@ -14292,7 +14290,7 @@ import org.slf4j.LoggerFactory;
                 for (int _i201 = 0; _i201 < _map200.size; ++_i201)
                 {
                   String _key202; // required
-                  org.apache.accumulo.core.data.thrift.MapFileInfo _val203; // optional
+                  org.apache.accumulo.core.data.thrift.MapFileInfo _val203; // required
                   _key202 = iprot.readString();
                   _val203 = new org.apache.accumulo.core.data.thrift.MapFileInfo();
                   _val203.read(iprot);

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/shell/Shell.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/shell/Shell.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/shell/Shell.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/shell/Shell.java Wed Aug  8 20:02:58 2012
@@ -88,6 +88,7 @@ import org.apache.accumulo.core.util.she
 import org.apache.accumulo.core.util.shell.commands.EGrepCommand;
 import org.apache.accumulo.core.util.shell.commands.ExecfileCommand;
 import org.apache.accumulo.core.util.shell.commands.ExitCommand;
+import org.apache.accumulo.core.util.shell.commands.ExportTableCommand;
 import org.apache.accumulo.core.util.shell.commands.FlushCommand;
 import org.apache.accumulo.core.util.shell.commands.FormatterCommand;
 import org.apache.accumulo.core.util.shell.commands.GetAuthsCommand;
@@ -99,6 +100,7 @@ import org.apache.accumulo.core.util.she
 import org.apache.accumulo.core.util.shell.commands.HiddenCommand;
 import org.apache.accumulo.core.util.shell.commands.HistoryCommand;
 import org.apache.accumulo.core.util.shell.commands.ImportDirectoryCommand;
+import org.apache.accumulo.core.util.shell.commands.ImportTableCommand;
 import org.apache.accumulo.core.util.shell.commands.InfoCommand;
 import org.apache.accumulo.core.util.shell.commands.InsertCommand;
 import org.apache.accumulo.core.util.shell.commands.ListIterCommand;
@@ -301,7 +303,8 @@ public class Shell extends ShellOptions 
     Command[] stateCommands = {new AuthenticateCommand(), new ClsCommand(), new ClearCommand(), new NoTableCommand(), new SleepCommand(), new TableCommand(),
         new UserCommand(), new WhoAmICommand()};
     Command[] tableCommands = {new CloneTableCommand(), new ConfigCommand(), new CreateTableCommand(), new DeleteTableCommand(), new DropTableCommand(),
-        new DUCommand(), new OfflineCommand(), new OnlineCommand(), new RenameTableCommand(), new TablesCommand()};
+        new DUCommand(), new ExportTableCommand(), new ImportTableCommand(), new OfflineCommand(), new OnlineCommand(), new RenameTableCommand(),
+        new TablesCommand()};
     Command[] tableControlCommands = {new AddSplitsCommand(), new CompactCommand(), new ConstraintCommand(), new FlushCommand(), new GetGroupsCommand(),
         new GetSplitsCommand(), new MergeCommand(), new SetGroupsCommand()};
     Command[] userCommands = {new CreateUserCommand(), new DeleteUserCommand(), new DropUserCommand(), new GetAuthsCommand(), new PasswdCommand(),

Added: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/shell/commands/ExportTableCommand.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/shell/commands/ExportTableCommand.java?rev=1370925&view=auto
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/shell/commands/ExportTableCommand.java (added)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/shell/commands/ExportTableCommand.java Wed Aug  8 20:02:58 2012
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.core.util.shell.commands;
+
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.TableExistsException;
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.accumulo.core.util.shell.Shell;
+import org.apache.accumulo.core.util.shell.Shell.Command;
+import org.apache.accumulo.core.util.shell.Token;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+
+public class ExportTableCommand extends Command {
+  
+  private Option tableOpt;
+
+  @Override
+  public int execute(String fullCommand, CommandLine cl, Shell shellState) throws AccumuloException, AccumuloSecurityException, TableNotFoundException,
+      TableExistsException {
+    
+    String tableName = OptUtil.getTableOpt(cl, shellState);
+
+    shellState.getConnector().tableOperations().exportTable(tableName, cl.getArgs()[0]);
+    return 0;
+  }
+  
+  @Override
+  public String usage() {
+    return getName() + " <export dir>";
+  }
+  
+  @Override
+  public Options getOptions() {
+    Options o = new Options();
+    
+    tableOpt = new Option(Shell.tableOption, "table", true, "table to export");
+    
+    tableOpt.setArgName("table");
+    
+    o.addOption(tableOpt);
+
+    return o;
+  }
+  
+  @Override
+  public String description() {
+    return "exports a table";
+  }
+  
+  public void registerCompletion(Token root, Map<Command.CompletionSet,Set<String>> completionSet) {
+    registerCompletionForTables(root, completionSet);
+  }
+  
+  @Override
+  public int numArgs() {
+    return 1;
+  }
+}

Added: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/shell/commands/ImportTableCommand.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/shell/commands/ImportTableCommand.java?rev=1370925&view=auto
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/shell/commands/ImportTableCommand.java (added)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/shell/commands/ImportTableCommand.java Wed Aug  8 20:02:58 2012
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.core.util.shell.commands;
+
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.TableExistsException;
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.accumulo.core.util.shell.Shell;
+import org.apache.accumulo.core.util.shell.Shell.Command;
+import org.apache.commons.cli.CommandLine;
+
+public class ImportTableCommand extends Command {
+  
+  @Override
+  public int execute(String fullCommand, CommandLine cl, Shell shellState) throws AccumuloException, AccumuloSecurityException, TableNotFoundException,
+      TableExistsException {
+    
+    shellState.getConnector().tableOperations().importTable(cl.getArgs()[0], cl.getArgs()[1]);
+    return 0;
+  }
+  
+  @Override
+  public String usage() {
+    return getName() + " <table name> <import dir>";
+  }
+  
+  @Override
+  public String description() {
+    return "imports a table";
+  }
+  
+  @Override
+  public int numArgs() {
+    return 2;
+  }
+}

Modified: accumulo/trunk/core/src/main/thrift/client.thrift
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/thrift/client.thrift?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/thrift/client.thrift (original)
+++ accumulo/trunk/core/src/main/thrift/client.thrift Wed Aug  8 20:02:58 2012
@@ -34,6 +34,8 @@ enum TableOperation {
     DELETE_RANGE,
     BULK_IMPORT,
     COMPACT
+    IMPORT
+    EXPORT
 }
 
 enum TableOperationExceptionType {

Modified: accumulo/trunk/core/src/main/thrift/master.thrift
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/thrift/master.thrift?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/thrift/master.thrift (original)
+++ accumulo/trunk/core/src/main/thrift/master.thrift Wed Aug  8 20:02:58 2012
@@ -123,6 +123,8 @@ enum TableOperation {
   DELETE_RANGE
   BULK_IMPORT
   COMPACT
+  IMPORT
+  EXPORT
 }
 
 service MasterClientService {

Modified: accumulo/trunk/core/src/test/java/org/apache/accumulo/core/client/admin/TableOperationsHelperTest.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/test/java/org/apache/accumulo/core/client/admin/TableOperationsHelperTest.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/core/src/test/java/org/apache/accumulo/core/client/admin/TableOperationsHelperTest.java (original)
+++ accumulo/trunk/core/src/test/java/org/apache/accumulo/core/client/admin/TableOperationsHelperTest.java Wed Aug  8 20:02:58 2012
@@ -182,6 +182,12 @@ public class TableOperationsHelperTest {
       }
       Assert.assertEquals(expected, settings.get(tablename));
     }
+    
+    @Override
+    public void importTable(String tableName, String exportDir) throws TableExistsException, AccumuloException, AccumuloSecurityException {}
+    
+    @Override
+    public void exportTable(String tableName, String exportDir) throws TableNotFoundException, AccumuloException, AccumuloSecurityException {}
   }
   
   @Test

Added: accumulo/trunk/docs/examples/README.export
URL: http://svn.apache.org/viewvc/accumulo/trunk/docs/examples/README.export?rev=1370925&view=auto
==============================================================================
--- accumulo/trunk/docs/examples/README.export (added)
+++ accumulo/trunk/docs/examples/README.export Wed Aug  8 20:02:58 2012
@@ -0,0 +1,89 @@
+Notice:    Licensed to the Apache Software Foundation (ASF) under one
+           or more contributor license agreements.  See the NOTICE file
+           distributed with this work for additional information
+           regarding copyright ownership.  The ASF licenses this file
+           to you under the Apache License, Version 2.0 (the
+           "License"); you may not use this file except in compliance
+           with the License.  You may obtain a copy of the License at
+           .
+             http://www.apache.org/licenses/LICENSE-2.0
+           .
+           Unless required by applicable law or agreed to in writing,
+           software distributed under the License is distributed on an
+           "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+           KIND, either express or implied.  See the License for the
+           specific language governing permissions and limitations
+           under the License.
+
+Accumulo provides a mechanism to export and import tables.  This README shows
+how to use this feature.
+
+The shell session below shows creating a table, inserting data, and exporting
+the table.  A table must be offline to export it, and it should remain offline
+for the duration of the distcp.  An easy way to take a table offline without
+interrupting access to it is to clone it and take the clone offline.
+
+  root@test15> createtable table1
+  root@test15 table1> insert a cf1 cq1 v1
+  root@test15 table1> insert h cf1 cq1 v2
+  root@test15 table1> insert z cf1 cq1 v3
+  root@test15 table1> insert z cf1 cq2 v4
+  root@test15 table1> addsplits -t table1 b r
+  root@test15 table1> scan
+  a cf1:cq1 []    v1
+  h cf1:cq1 []    v2
+  z cf1:cq1 []    v3
+  z cf1:cq2 []    v4
+  root@test15> config -t table1 -s table.split.threshold=100M
+  root@test15 table1> clonetable table1 table1_exp
+  root@test15 table1> offline table1_exp
+  root@test15 table1> exporttable -t table1_exp /tmp/table1_export
+  root@test15 table1> quit
+
+After executing the export command, a few files are created in the hdfs dir.
+One of the files is a list of files to distcp as shown below.
+
+  $ hadoop fs -ls /tmp/table1_export
+  Found 2 items
+  -rw-r--r--   3 user supergroup        162 2012-07-25 09:56 /tmp/table1_export/distcp.txt
+  -rw-r--r--   3 user supergroup        821 2012-07-25 09:56 /tmp/table1_export/exportMetadata.zip
+  $ hadoop fs -cat /tmp/table1_export/distcp.txt
+  hdfs://n1.example.com:6093/accumulo/tables/3/default_tablet/F0000000.rf
+  hdfs://n1.example.com:6093/tmp/table1_export/exportMetadata.zip
+
+Before the table can be imported, it must be copied using distcp.  After the
+distcp completes, the cloned table may be deleted.
+
+  $ hadoop distcp -f /tmp/table1_export/distcp.txt /tmp/table1_export_dest
+
+The Accumulo shell session below shows importing the table and inspecting it.
+The data, splits, config, and logical time information for the table were
+preserved.
+
+  root@test15> importtable table1_copy /tmp/table1_export_dest
+  root@test15> table table1_copy
+  root@test15 table1_copy> scan
+  a cf1:cq1 []    v1
+  h cf1:cq1 []    v2
+  z cf1:cq1 []    v3
+  z cf1:cq2 []    v4
+  root@test15 table1_copy> getsplits -t table1_copy
+  b
+  r
+  root@test15> config -t table1_copy -f split
+  ---------+--------------------------+-------------------------------------------
+  SCOPE    | NAME                     | VALUE
+  ---------+--------------------------+-------------------------------------------
+  default  | table.split.threshold .. | 1G
+  table    |    @override ........... | 100M
+  ---------+--------------------------+-------------------------------------------
+  root@test15> tables -l
+  !METADATA       =>         !0
+  trace           =>          1
+  table1_copy     =>          5
+  root@test15 table1_copy> scan -t !METADATA -b 5 -c srv:time
+  5;b srv:time []    M1343224500467
+  5;r srv:time []    M1343224500467
+  5< srv:time []    M1343224500467
+
+

Modified: accumulo/trunk/docs/src/user_manual/chapters/table_configuration.tex
URL: http://svn.apache.org/viewvc/accumulo/trunk/docs/src/user_manual/chapters/table_configuration.tex?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/docs/src/user_manual/chapters/table_configuration.tex (original)
+++ accumulo/trunk/docs/src/user_manual/chapters/table_configuration.tex Wed Aug  8 20:02:58 2012
@@ -666,4 +666,12 @@ root@a14 cic> 
 \end{verbatim}
 \normalsize
 
+\section{Exporting Tables}
 
+Accumulo supports exporting tables for the purpose of copying tables to another
+cluster.  Exporting and importing tables preserves the table's configuration,
+splits, and logical time.  Tables are exported and then copied via the Hadoop
+distcp command.  To export a table, it must be offline and stay offline while
+distcp runs.  The reason it needs to stay offline is to prevent files from being
+deleted.  A table can be cloned and the clone taken offline in order to avoid
+losing access to the table.  See docs/examples/README.export for an example.

Modified: accumulo/trunk/server/src/main/java/org/apache/accumulo/server/master/Master.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/server/src/main/java/org/apache/accumulo/server/master/Master.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/server/src/main/java/org/apache/accumulo/server/master/Master.java (original)
+++ accumulo/trunk/server/src/main/java/org/apache/accumulo/server/master/Master.java Wed Aug  8 20:02:58 2012
@@ -132,6 +132,8 @@ import org.apache.accumulo.server.master
 import org.apache.accumulo.server.master.tableOps.CompactRange;
 import org.apache.accumulo.server.master.tableOps.CreateTable;
 import org.apache.accumulo.server.master.tableOps.DeleteTable;
+import org.apache.accumulo.server.master.tableOps.ExportTable;
+import org.apache.accumulo.server.master.tableOps.ImportTable;
 import org.apache.accumulo.server.master.tableOps.RenameTable;
 import org.apache.accumulo.server.master.tableOps.TableRangeOp;
 import org.apache.accumulo.server.master.tableOps.TraceRepo;
@@ -1031,6 +1033,30 @@ public class Master implements LiveTServ
           fate.seedTransaction(opid, new TraceRepo<Master>(new CompactRange(tableId, startRow, endRow, iterators)), autoCleanup);
           break;
         }
+        case IMPORT: {
+          String tableName = ByteBufferUtil.toString(arguments.get(0));
+          String exportDir = ByteBufferUtil.toString(arguments.get(1));
+          
+          verify(c, check(c, SystemPermission.CREATE_TABLE));
+          checkNotMetadataTable(tableName, TableOperation.CREATE);
+          checkTableName(tableName, TableOperation.CREATE);
+          
+          fate.seedTransaction(opid, new TraceRepo<Master>(new ImportTable(c.user, tableName, exportDir)), autoCleanup);
+          break;
+        }
+        case EXPORT: {
+          String tableName = ByteBufferUtil.toString(arguments.get(0));
+          String exportDir = ByteBufferUtil.toString(arguments.get(1));
+          
+          String tableId = checkTableId(tableName, TableOperation.EXPORT);
+          
+          verify(c, tableId, TableOperation.EXPORT, check(c, tableId, TablePermission.READ));
+          checkNotMetadataTable(tableName, TableOperation.EXPORT);
+          
+          fate.seedTransaction(opid, new TraceRepo<Master>(new ExportTable(tableName, tableId, exportDir)), autoCleanup);
+          break;
+        }
+
         default:
           throw new UnsupportedOperationException();
       }

Modified: accumulo/trunk/server/src/main/java/org/apache/accumulo/server/master/state/tables/TableManager.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/server/src/main/java/org/apache/accumulo/server/master/state/tables/TableManager.java?rev=1370925&r1=1370924&r2=1370925&view=diff
==============================================================================
--- accumulo/trunk/server/src/main/java/org/apache/accumulo/server/master/state/tables/TableManager.java (original)
+++ accumulo/trunk/server/src/main/java/org/apache/accumulo/server/master/state/tables/TableManager.java Wed Aug  8 20:02:58 2012
@@ -87,7 +87,7 @@ public class TableManager {
     return tableStateCache.get(tableId);
   }
   
-  public class IllegalTableTransitionException extends Exception {
+  public static class IllegalTableTransitionException extends Exception {
     private static final long serialVersionUID = 1L;
     
     final TableState oldState;

Added: accumulo/trunk/server/src/main/java/org/apache/accumulo/server/master/tableOps/ExportTable.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/server/src/main/java/org/apache/accumulo/server/master/tableOps/ExportTable.java?rev=1370925&view=auto
==============================================================================
--- accumulo/trunk/server/src/main/java/org/apache/accumulo/server/master/tableOps/ExportTable.java (added)
+++ accumulo/trunk/server/src/main/java/org/apache/accumulo/server/master/tableOps/ExportTable.java Wed Aug  8 20:02:58 2012
@@ -0,0 +1,309 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.server.master.tableOps;
+
+import java.io.BufferedOutputStream;
+import java.io.BufferedWriter;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Serializable;
+import java.net.URI;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
+
+import org.apache.accumulo.core.Constants;
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.Scanner;
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.accumulo.core.client.impl.Tables;
+import org.apache.accumulo.core.client.impl.thrift.TableOperation;
+import org.apache.accumulo.core.client.impl.thrift.TableOperationExceptionType;
+import org.apache.accumulo.core.client.impl.thrift.ThriftTableOperationException;
+import org.apache.accumulo.core.conf.AccumuloConfiguration;
+import org.apache.accumulo.core.conf.DefaultConfiguration;
+import org.apache.accumulo.core.conf.Property;
+import org.apache.accumulo.core.data.Key;
+import org.apache.accumulo.core.data.KeyExtent;
+import org.apache.accumulo.core.data.Value;
+import org.apache.accumulo.core.master.state.tables.TableState;
+import org.apache.accumulo.fate.Repo;
+import org.apache.accumulo.server.ServerConstants;
+import org.apache.accumulo.server.conf.ServerConfiguration;
+import org.apache.accumulo.server.conf.TableConfiguration;
+import org.apache.accumulo.server.master.Master;
+import org.apache.accumulo.server.security.SecurityConstants;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
+
/**
 * Serializable state passed between the steps of the export-table FATE
 * operation: the source table's name and id, plus the destination
 * directory in HDFS that the export files are written into.
 */
class ExportInfo implements Serializable {

  private static final long serialVersionUID = 1L;

  // Name and id of the table being exported.
  public String tableName;
  public String tableID;
  // HDFS directory receiving the export files.
  public String exportDir;
}
+
+class WriteExportFiles extends MasterRepo {
+  
+  private static final long serialVersionUID = 1L;
+  private ExportInfo tableInfo;
+  
+  WriteExportFiles(ExportInfo tableInfo) {
+    this.tableInfo = tableInfo;
+  }
+
+  private void checkOffline(Connector conn) throws Exception {
+    if (Tables.getTableState(conn.getInstance(), tableInfo.tableID) != TableState.OFFLINE) {
+      Tables.clearCache(conn.getInstance());
+      if (Tables.getTableState(conn.getInstance(), tableInfo.tableID) != TableState.OFFLINE) {
+        throw new ThriftTableOperationException(tableInfo.tableID, tableInfo.tableName, TableOperation.EXPORT, TableOperationExceptionType.OTHER,
+            "Table is not offline");
+      }
+    }
+  }
+  
+  @Override
+  public long isReady(long tid, Master env) throws Exception {
+    
+    long reserved = Utils.reserveTable(tableInfo.tableID, tid, false, true, TableOperation.EXPORT);
+    if (reserved > 0)
+      return reserved;
+
+    Connector conn = env.getInstance().getConnector(SecurityConstants.getSystemCredentials());
+    
+    checkOffline(conn);
+    
+    Scanner metaScanner = conn.createScanner(Constants.METADATA_TABLE_NAME, Constants.NO_AUTHS);
+    metaScanner.setRange(new KeyExtent(new Text(tableInfo.tableID), null, null).toMetadataRange());
+    metaScanner.fetchColumnFamily(Constants.METADATA_CURRENT_LOCATION_COLUMN_FAMILY);
+    
+    // TODO look for walogs
+    for (Entry<Key,Value> entry : metaScanner) {
+      return 500;
+    }
+    
+    return 0;
+  }
+
+  @Override
+  public Repo<Master> call(long tid, Master env) throws Exception {
+    Connector conn = env.getInstance().getConnector(SecurityConstants.getSystemCredentials());
+
+    try {
+      exportTable(env.getFileSystem(), conn, tableInfo.tableName, tableInfo.tableID, tableInfo.exportDir);
+    } catch (IOException ioe) {
+      throw new ThriftTableOperationException(tableInfo.tableID, tableInfo.tableName, TableOperation.EXPORT, TableOperationExceptionType.OTHER,
+          "Failed to create export files " + ioe.getMessage());
+    }
+    Utils.unreserveTable(tableInfo.tableID, tid, false);
+    Utils.unreserveHdfsDirectory(new Path(tableInfo.exportDir).toString(), tid);
+    return null;
+  }
+  
+  @Override
+  public void undo(long tid, Master env) throws Exception {
+    Utils.unreserveTable(tableInfo.tableID, tid, false);
+  }
+  
+  public static void exportTable(FileSystem fs, Connector conn, String tableName, String tableID, String exportDir) throws Exception {
+
+    fs.mkdirs(new Path(exportDir));
+    
+    Path exportMetaFilePath = new Path(exportDir, Constants.EXPORT_FILE);
+    
+    FSDataOutputStream fileOut = fs.create(exportMetaFilePath, false);
+    ZipOutputStream zipOut = new ZipOutputStream(fileOut);
+    BufferedOutputStream bufOut = new BufferedOutputStream(zipOut);
+    DataOutputStream dataOut = new DataOutputStream(bufOut);
+    
+    try {
+      
+      zipOut.putNextEntry(new ZipEntry(Constants.EXPORT_INFO_FILE));
+      OutputStreamWriter osw = new OutputStreamWriter(dataOut);
+      osw.append(ExportTable.EXPORT_VERSION_PROP + ":" + ExportTable.VERSION + "\n");
+      osw.append("srcInstanceName:" + conn.getInstance().getInstanceName() + "\n");
+      osw.append("srcInstanceID:" + conn.getInstance().getInstanceID() + "\n");
+      osw.append("srcZookeepers:" + conn.getInstance().getZooKeepers() + "\n");
+      osw.append("srcTableName:" + tableName + "\n");
+      osw.append("srcTableID:" + tableID + "\n");
+      osw.append(ExportTable.DATA_VERSION_PROP + ":" + Constants.DATA_VERSION + "\n");
+      osw.append("srcCodeVersion:" + Constants.VERSION + "\n");
+      
+      osw.flush();
+      dataOut.flush();
+      
+      exportConfig(conn, tableID, zipOut, dataOut);
+      dataOut.flush();
+      
+      Map<String,String> uniqueFiles = exportMetadata(conn, tableID, zipOut, dataOut);
+      
+      dataOut.close();
+      dataOut = null;
+
+      createDistcpFile(fs, exportDir, exportMetaFilePath, uniqueFiles);
+      
+    } finally {
+      if (dataOut != null)
+        dataOut.close();
+    }
+  }
+
+  private static void createDistcpFile(FileSystem fs, String exportDir, Path exportMetaFilePath, Map<String,String> uniqueFiles) throws IOException {
+    BufferedWriter distcpOut = new BufferedWriter(new OutputStreamWriter(fs.create(new Path(exportDir, "distcp.txt"), false)));
+    
+    try {
+      URI uri = fs.getUri();
+      
+      for (String relPath : uniqueFiles.values()) {
+        Path absPath = new Path(uri.getScheme(), uri.getAuthority(), ServerConstants.getTablesDir() + relPath);
+        distcpOut.append(absPath.toUri().toString());
+        distcpOut.newLine();
+      }
+      
+      Path absEMP = exportMetaFilePath;
+      if (!exportMetaFilePath.isAbsolute())
+        absEMP = new Path(fs.getWorkingDirectory().toUri().getPath(), exportMetaFilePath);
+      
+      distcpOut.append(new Path(uri.getScheme(), uri.getAuthority(), absEMP.toString()).toUri().toString());
+
+      distcpOut.newLine();
+      
+      distcpOut.close();
+      distcpOut = null;
+
+    } finally {
+      if (distcpOut != null)
+        distcpOut.close();
+    }
+  }
+  
+  private static Map<String,String> exportMetadata(Connector conn, String tableID, ZipOutputStream zipOut, DataOutputStream dataOut) throws IOException,
+      TableNotFoundException {
+    zipOut.putNextEntry(new ZipEntry(Constants.EXPORT_METADATA_FILE));
+    
+    Map<String,String> uniqueFiles = new HashMap<String,String>();
+
+    Scanner metaScanner = conn.createScanner(Constants.METADATA_TABLE_NAME, Constants.NO_AUTHS);
+    metaScanner.fetchColumnFamily(Constants.METADATA_DATAFILE_COLUMN_FAMILY);
+    Constants.METADATA_PREV_ROW_COLUMN.fetch(metaScanner);
+    Constants.METADATA_TIME_COLUMN.fetch(metaScanner);
+    metaScanner.setRange(new KeyExtent(new Text(tableID), null, null).toMetadataRange());
+    
+    for (Entry<Key,Value> entry : metaScanner) {
+      entry.getKey().write(dataOut);
+      entry.getValue().write(dataOut);
+      
+      if (entry.getKey().getColumnFamily().equals(Constants.METADATA_DATAFILE_COLUMN_FAMILY)) {
+        String relPath = entry.getKey().getColumnQualifierData().toString();
+        
+        if (relPath.startsWith("../"))
+          relPath = relPath.substring(2);
+        else
+          relPath = "/" + tableID + relPath;
+        
+        String tokens[] = relPath.split("/");
+        if (tokens.length != 4) {
+          throw new RuntimeException("Illegal path " + relPath);
+        }
+        
+        String filename = tokens[3];
+        
+        String existingPath = uniqueFiles.get(filename);
+        if (existingPath == null) {
+          uniqueFiles.put(filename, relPath);
+        } else if (!existingPath.equals(relPath)) {
+          // make sure file names are unique, should only apply for tables with file names generated by Accumulo 1.3 and earlier
+          // TODO throw another type of exception?
+          throw new RuntimeException("Cannot export table with nonunique file names " + filename + ". Major compact table.");
+        }
+        
+      }
+    }
+    return uniqueFiles;
+  }
+
+  private static void exportConfig(Connector conn, String tableID, ZipOutputStream zipOut, DataOutputStream dataOut) throws AccumuloException,
+      AccumuloSecurityException, TableNotFoundException, IOException {
+
+    DefaultConfiguration defaultConfig = AccumuloConfiguration.getDefaultConfiguration();
+    Map<String,String> siteConfig = conn.instanceOperations().getSiteConfiguration();
+    Map<String,String> systemConfig = conn.instanceOperations().getSystemConfiguration();
+
+    TableConfiguration tableConfig = ServerConfiguration.getTableConfiguration(conn.getInstance(), tableID);
+
+    OutputStreamWriter osw = new OutputStreamWriter(dataOut);
+
+    // only put props that are different than defaults and higher level configurations
+    zipOut.putNextEntry(new ZipEntry(Constants.EXPORT_TABLE_CONFIG_FILE));
+    for (Entry<String,String> prop : tableConfig) {
+      if (prop.getKey().startsWith(Property.TABLE_PREFIX.getKey())) {
+        Property key = Property.getPropertyByKey(prop.getKey());
+        
+        if (key == null || !defaultConfig.get(key).equals(prop.getValue())) {
+          if (!prop.getValue().equals(siteConfig.get(prop.getKey())) && !prop.getValue().equals(systemConfig.get(prop.getKey()))) {
+            osw.append(prop.getKey() + "=" + prop.getValue() + "\n");
+          }
+        }
+      }
+    }
+    
+    osw.flush();
+  }
+}
+
+public class ExportTable extends MasterRepo {
+  private static final long serialVersionUID = 1L;
+  
+  private ExportInfo tableInfo;
+
+  public ExportTable(String tableName, String tableId, String exportDir) {
+    tableInfo = new ExportInfo();
+    tableInfo.tableName = tableName;
+    tableInfo.exportDir = exportDir;
+    tableInfo.tableID = tableId;
+  }
+  
+  @Override
+  public long isReady(long tid, Master environment) throws Exception {
+    return Utils.reserveHdfsDirectory(new Path(tableInfo.exportDir).toString(), tid);
+  }
+  
+  @Override
+  public Repo<Master> call(long tid, Master env) throws Exception {
+    return new WriteExportFiles(tableInfo);
+  }
+  
+  @Override
+  public void undo(long tid, Master env) throws Exception {
+    Utils.unreserveHdfsDirectory(new Path(tableInfo.exportDir).toString(), tid);
+  }
+  
+  public static final int VERSION = 1;
+  
+  public static final String DATA_VERSION_PROP = "srcDataVersion";
+  public static final String EXPORT_VERSION_PROP = "exportVersion";
+
+}



Mime
View raw message