From: busbey@apache.org
To: commits@accumulo.apache.org
Date: Tue, 22 Apr 2014 21:08:44 -0000
Subject: [09/15] git commit: ACCUMULO-2654 Update empty rfile utility to use jcommander.

ACCUMULO-2654 Update empty rfile utility to use jcommander.

Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/a65565b4
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/a65565b4
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/a65565b4

Branch: refs/heads/1.6.0-SNAPSHOT
Commit: a65565b43d3748c2772a90a024fe2d3745869c49
Parents: 101cd1f
Author: Sean Busbey
Authored: Wed Apr 16 02:19:09 2014 -0500
Committer: Sean Busbey
Committed: Tue Apr 22 14:36:32 2014 -0500

----------------------------------------------------------------------
 .../accumulo/core/file/rfile/CreateEmpty.java | 61 +++++++++++---------
 1 file changed, 35 insertions(+), 26 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/accumulo/blob/a65565b4/core/src/main/java/org/apache/accumulo/core/file/rfile/CreateEmpty.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/file/rfile/CreateEmpty.java b/core/src/main/java/org/apache/accumulo/core/file/rfile/CreateEmpty.java
index 7663b2d..09a2d61 100644
--- a/core/src/main/java/org/apache/accumulo/core/file/rfile/CreateEmpty.java
+++ b/core/src/main/java/org/apache/accumulo/core/file/rfile/CreateEmpty.java
@@ -18,53 +18,62 @@ package org.apache.accumulo.core.file.rfile;
 
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.List;
 
+import org.apache.accumulo.core.cli.Help;
 import org.apache.accumulo.core.conf.DefaultConfiguration;
 import org.apache.accumulo.core.file.FileSKVWriter;
 import org.apache.accumulo.core.file.rfile.RFile.Writer;
 import org.apache.accumulo.core.file.rfile.bcfile.TFile;
 import org.apache.accumulo.core.util.CachedConfiguration;
-import org.apache.commons.cli.BasicParser;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
+import com.beust.jcommander.IParameterValidator;
+import com.beust.jcommander.Parameter;
+import com.beust.jcommander.ParameterException;
+
 /**
  * Create an empty RFile for use in recovering from data loss where Accumulo still refers internally to a path.
  */
 public class CreateEmpty {
-  public static void main(String[] args) throws Exception {
-    Configuration conf = CachedConfiguration.getInstance();
-
-    Options opts = new Options();
-    Option codecOption = new Option("c", "codec", true, "the compression codec to use. one of " + Arrays.toString(TFile.getSupportedCompressionAlgorithms()) + ". defaults to none.");
-    opts.addOption(codecOption);
-    Option help = new Option( "?", "help", false, "print this message" );
-    opts.addOption(help);
-
-    CommandLine commandLine = new BasicParser().parse(opts, args);
-    if (commandLine.hasOption(help.getOpt()) || 0 == commandLine.getArgs().length) {
-      HelpFormatter formatter = new HelpFormatter();
-      formatter.printHelp(120, "$ACCUMULO_HOME/bin/accumulo " + CreateEmpty.class.getName() + "[options] path [path ...]",
-          "", opts,
-          "each path given is a filesystem URL. Relative paths are resolved according to the default filesytem defined in your Hadoop configuration, which is usually an HDFS instance.");
+  public static class NamedLikeRFile implements IParameterValidator {
+    @Override
+    public void validate(String name, String value) throws ParameterException {
+      if (!value.endsWith(".rf")) {
+        throw new ParameterException("File must end with .rf and '" + value + "' does not.");
+      }
     }
-    String codec = commandLine.getOptionValue(codecOption.getOpt(), TFile.COMPRESSION_NONE);
+  }
 
-    for (String arg : commandLine.getArgs()) {
-      if (!arg.endsWith(".rf")) {
-        throw new IllegalArgumentException("File must end with .rf and '" + arg + "' does not.");
+  public static class IsSupportedCompressionAlgorithm implements IParameterValidator {
+    @Override
+    public void validate(String name, String value) throws ParameterException {
+      String[] algorithms = TFile.getSupportedCompressionAlgorithms();
+      if (!((Arrays.asList(algorithms)).contains(value))) {
+        throw new ParameterException("Compression codec must be one of " + Arrays.toString(TFile.getSupportedCompressionAlgorithms()));
       }
     }
+  }
+
+  static class Opts extends Help {
+    @Parameter(names = {"-c", "--codec"}, description = "the compression codec to use.", validateWith = IsSupportedCompressionAlgorithm.class)
+    String codec = TFile.COMPRESSION_NONE;
+    @Parameter(description = "<path> { <path> ... } Each path given is a URL. Relative paths are resolved according to the default filesystem defined in your Hadoop configuration, which is usually an HDFS instance.", required = true, validateWith = NamedLikeRFile.class)
+    List<String> files = new ArrayList<String>();
+  }
+
+  public static void main(String[] args) throws Exception {
+    Configuration conf = CachedConfiguration.getInstance();
+
+    Opts opts = new Opts();
+    opts.parseArgs(CreateEmpty.class.getName(), args);
 
-    for (String arg : commandLine.getArgs()) {
+    for (String arg : opts.files) {
       Path path = new Path(arg);
-      FileSKVWriter writer = (new RFileOperations()).openWriter(arg, path.getFileSystem(conf), conf, DefaultConfiguration.getDefaultConfiguration(), codec);
+      FileSKVWriter writer = (new RFileOperations()).openWriter(arg, path.getFileSystem(conf), conf, DefaultConfiguration.getDefaultConfiguration(), opts.codec);
       writer.close();
     }
   }
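----------------------------------------------------------------------

For reference, a minimal self-contained sketch of the jcommander pattern this
patch adopts: an IParameterValidator attached via @Parameter(validateWith = ...)
rejects bad values during parsing, before main() runs. The class names here
(ValidatorDemo, EndsWithRf) are illustrative only, and plain JCommander is used
in place of Accumulo's Help base class, whose parseArgs method wraps the same
parsing and adds --help handling.

    import java.util.ArrayList;
    import java.util.List;

    import com.beust.jcommander.IParameterValidator;
    import com.beust.jcommander.JCommander;
    import com.beust.jcommander.Parameter;
    import com.beust.jcommander.ParameterException;

    public class ValidatorDemo {
      // Same check as NamedLikeRFile above: reject values not ending in .rf.
      public static class EndsWithRf implements IParameterValidator {
        @Override
        public void validate(String name, String value) throws ParameterException {
          if (!value.endsWith(".rf")) {
            throw new ParameterException("File must end with .rf and '" + value + "' does not.");
          }
        }
      }

      static class Opts {
        @Parameter(names = {"-c", "--codec"}, description = "the compression codec to use.")
        String codec = "none";

        // The un-named "main" parameter collects positional arguments;
        // jcommander runs the validator once per value inside parse().
        @Parameter(description = "<path> { <path> ... }", required = true, validateWith = EndsWithRf.class)
        List<String> files = new ArrayList<String>();
      }

      public static void main(String[] args) {
        Opts opts = new Opts();
        JCommander jc = new JCommander(opts);
        try {
          jc.parse(args); // validators fire here
        } catch (ParameterException e) {
          System.err.println(e.getMessage());
          jc.usage();
          System.exit(1);
        }
        System.out.println("codec=" + opts.codec + " files=" + opts.files);
      }
    }

Given an argument like "badname.txt", parse() throws before any file-handling
code runs, whereas the old commons-cli version only discovered a bad extension
inside main()'s own loop.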