tajo-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jh...@apache.org
Subject [1/2] TAJO-753: Clean up of maven dependencies. (jinho)
Date Tue, 15 Apr 2014 10:01:30 GMT
Repository: tajo
Updated Branches:
  refs/heads/master 06a14960e -> 1d24a25ac


http://git-wip-us.apache.org/repos/asf/tajo/blob/1d24a25a/tajo-rpc/pom.xml
----------------------------------------------------------------------
diff --git a/tajo-rpc/pom.xml b/tajo-rpc/pom.xml
index 7f7ae79..839e3e0 100644
--- a/tajo-rpc/pom.xml
+++ b/tajo-rpc/pom.xml
@@ -137,54 +137,22 @@
           <groupId>com.google.code.gson</groupId>
           <artifactId>gson</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>org.slf4j</groupId>
-          <artifactId>slf4j-log4j12</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.google.inject</groupId>
-          <artifactId>guice</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.google.inject.extensions</groupId>
-          <artifactId>guice-servlet</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.sun.jersey.jersey-test-framework</groupId>
-          <artifactId>jersey-test-framework-grizzly2</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.sun.jersey.contribs</groupId>
-          <artifactId>jersey-guice</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>com.google.protobuf</groupId>
-      <artifactId>protobuf-java</artifactId>
-    </dependency>
-    <dependency>
       <groupId>io.netty</groupId>
       <artifactId>netty</artifactId>
     </dependency>
     <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging-api</artifactId>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>commons-lang</groupId>
-      <artifactId>commons-lang</artifactId>
-      <version>2.6</version>
-    </dependency>
   </dependencies>
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

http://git-wip-us.apache.org/repos/asf/tajo/blob/1d24a25a/tajo-rpc/src/main/java/org/apache/tajo/rpc/ServerCallable.java
----------------------------------------------------------------------
diff --git a/tajo-rpc/src/main/java/org/apache/tajo/rpc/ServerCallable.java b/tajo-rpc/src/main/java/org/apache/tajo/rpc/ServerCallable.java
index 214a4ba..b4e5f9a 100644
--- a/tajo-rpc/src/main/java/org/apache/tajo/rpc/ServerCallable.java
+++ b/tajo-rpc/src/main/java/org/apache/tajo/rpc/ServerCallable.java
@@ -25,7 +25,6 @@ import java.util.ArrayList;
 import java.util.List;
 
 import com.google.protobuf.ServiceException;
-import org.apache.hadoop.ipc.RemoteException;
 import org.apache.tajo.conf.TajoConf;
 
 public abstract class ServerCallable<T> {
@@ -156,8 +155,8 @@ public abstract class ServerCallable<T> {
     if (t instanceof UndeclaredThrowableException) {
       t = t.getCause();
     }
-    if (t instanceof RemoteException) {
-      t = ((RemoteException)t).unwrapRemoteException();
+    if (t instanceof RemoteException && t.getCause() != null) {
+      t = t.getCause();
     }
     return t;
   }

http://git-wip-us.apache.org/repos/asf/tajo/blob/1d24a25a/tajo-storage/pom.xml
----------------------------------------------------------------------
diff --git a/tajo-storage/pom.xml b/tajo-storage/pom.xml
index 5850ed4..ad81f71 100644
--- a/tajo-storage/pom.xml
+++ b/tajo-storage/pom.xml
@@ -155,13 +155,11 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.tajo</groupId>
+      <artifactId>tajo-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tajo</groupId>
       <artifactId>tajo-catalog-common</artifactId>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.tajo</groupId>
-          <artifactId>tajo-rpc</artifactId>
-        </exclusion>
-      </exclusions>
     </dependency>
 
     <dependency>
@@ -184,10 +182,10 @@
         </exclusion>
       </exclusions>
     </dependency>
-
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
       <exclusions>
         <exclusion>
           <artifactId>zookeeper</artifactId>
@@ -206,6 +204,7 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
+      <scope>provided</scope>
       <exclusions>
       <exclusion>
         <groupId>commons-el</groupId>
@@ -229,7 +228,6 @@
         </exclusion>
       </exclusions>
     </dependency>
-
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-minicluster</artifactId>
@@ -255,9 +253,32 @@
           <groupId>com.sun.jersey.jersey-test-framework</groupId>
           <artifactId>jersey-test-framework-grizzly2</artifactId>
         </exclusion>
+        <exclusion>
+          <artifactId>hadoop-yarn-server-tests</artifactId>
+          <groupId>org.apache.hadoop</groupId>
+        </exclusion>
+        <exclusion>
+          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+          <groupId>org.apache.hadoop</groupId>
+        </exclusion>
+        <exclusion>
+          <artifactId>hadoop-mapreduce-client-app</artifactId>
+          <groupId>org.apache.hadoop</groupId>
+        </exclusion>
+        <exclusion>
+          <artifactId>hadoop-yarn-api</artifactId>
+          <groupId>org.apache.hadoop</groupId>
+        </exclusion>
+        <exclusion>
+          <artifactId>hadoop-mapreduce-client-hs</artifactId>
+          <groupId>org.apache.hadoop</groupId>
+        </exclusion>
+        <exclusion>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <groupId>org.apache.hadoop</groupId>
+        </exclusion>
       </exclusions>
     </dependency>
-
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
@@ -268,31 +289,6 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging-api</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>com.google.guava</groupId>
-      <artifactId>guava</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>com.google.code.gson</groupId>
-      <artifactId>gson</artifactId>
-      <type>jar</type>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>commons-lang</groupId>
-      <artifactId>commons-lang</artifactId>
-      <version>2.6</version>
-    </dependency>
-
-    <dependency>
       <groupId>com.twitter</groupId>
       <artifactId>parquet-column</artifactId>
       <version>${parquet.version}</version>

http://git-wip-us.apache.org/repos/asf/tajo/blob/1d24a25a/tajo-storage/src/main/java/org/apache/tajo/storage/AbstractStorageManager.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/AbstractStorageManager.java b/tajo-storage/src/main/java/org/apache/tajo/storage/AbstractStorageManager.java
index 6615208..a2dda76 100644
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/AbstractStorageManager.java
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/AbstractStorageManager.java
@@ -22,7 +22,6 @@ package org.apache.tajo.storage;
 import com.google.common.collect.Lists;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.net.util.Base64;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -52,6 +51,7 @@ public abstract class AbstractStorageManager {
   protected final FileSystem fs;
   protected final Path tableBaseDir;
   protected final boolean blocksMetadataEnabled;
+  private static final HdfsVolumeId zeroVolumeId = new HdfsVolumeId(Bytes.toBytes(0));
 
   /**
    * Cache of scanner handlers for each storage type.
@@ -492,15 +492,8 @@ public abstract class AbstractStorageManager {
     int[] diskIds = new int[volumeIds.length];
     for (int i = 0; i < volumeIds.length; i++) {
       int diskId = -1;
-      if (volumeIds[i] != null && volumeIds[i].isValid()) {
-        String volumeIdString = volumeIds[i].toString();
-        byte[] volumeIdBytes = Base64.decodeBase64(volumeIdString);
-
-        if (volumeIdBytes.length == 4) {
-          diskId = Bytes.toInt(volumeIdBytes);
-        } else if (volumeIdBytes.length == 1) {
-          diskId = (int) volumeIdBytes[0];  // support hadoop-2.0.2
-        }
+      if (volumeIds[i] != null && volumeIds[i].hashCode() > 0) {
+        diskId = volumeIds[i].hashCode() - zeroVolumeId.hashCode();
       }
       diskIds[i] = diskId;
     }

http://git-wip-us.apache.org/repos/asf/tajo/blob/1d24a25a/tajo-storage/src/main/java/org/apache/tajo/storage/CSVFile.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/CSVFile.java b/tajo-storage/src/main/java/org/apache/tajo/storage/CSVFile.java
index 857278a..652a8e9 100644
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/CSVFile.java
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/CSVFile.java
@@ -82,10 +82,10 @@ public class CSVFile {
       this.fs = path.getFileSystem(conf);
       this.meta = meta;
       this.schema = schema;
-      this.delimiter = StringEscapeUtils.unescapeJava(this.meta.getOption(CatalogConstants.CSVFILE_DELIMITER,
-          CatalogConstants.DEFAULT_FIELD_DELIMITER)).charAt(0);
+      this.delimiter = StringEscapeUtils.unescapeJava(this.meta.getOption(StorageConstants.CSVFILE_DELIMITER,
+          StorageConstants.DEFAULT_FIELD_DELIMITER)).charAt(0);
       this.columnNum = schema.size();
-      String nullCharacters = StringEscapeUtils.unescapeJava(this.meta.getOption(CatalogConstants.CSVFILE_NULL));
+      String nullCharacters = StringEscapeUtils.unescapeJava(this.meta.getOption(StorageConstants.CSVFILE_NULL));
       if (StringUtils.isEmpty(nullCharacters)) {
         nullChars = NullDatum.get().asTextBytes();
       } else {
@@ -108,7 +108,7 @@ public class CSVFile {
         isShuffle = false;
       }
 
-      String codecName = this.meta.getOption(CatalogConstants.COMPRESSION_CODEC);
+      String codecName = this.meta.getOption(StorageConstants.COMPRESSION_CODEC);
       if(!StringUtils.isEmpty(codecName)){
         codecFactory = new CompressionCodecFactory(conf);
         codec = codecFactory.getCodecByClassName(codecName);
@@ -139,7 +139,7 @@ public class CSVFile {
       }
 
       try {
-        String serdeClass = this.meta.getOption(CatalogConstants.CSVFILE_SERDE,
+        String serdeClass = this.meta.getOption(StorageConstants.CSVFILE_SERDE,
             TextSerializerDeserializer.class.getName());
         serde = (SerializerDeserializer) Class.forName(serdeClass).newInstance();
       } catch (Exception e) {
@@ -260,10 +260,10 @@ public class CSVFile {
       }
 
       //Delimiter
-      String delim  = meta.getOption(CatalogConstants.CSVFILE_DELIMITER, CatalogConstants.DEFAULT_FIELD_DELIMITER);
+      String delim  = meta.getOption(StorageConstants.CSVFILE_DELIMITER, StorageConstants.DEFAULT_FIELD_DELIMITER);
       this.delimiter = StringEscapeUtils.unescapeJava(delim).charAt(0);
 
-      String nullCharacters = StringEscapeUtils.unescapeJava(meta.getOption(CatalogConstants.CSVFILE_NULL));
+      String nullCharacters = StringEscapeUtils.unescapeJava(meta.getOption(StorageConstants.CSVFILE_NULL));
       if (StringUtils.isEmpty(nullCharacters)) {
         nullChars = NullDatum.get().asTextBytes();
       } else {
@@ -340,7 +340,7 @@ public class CSVFile {
       }
 
       try {
-        String serdeClass = this.meta.getOption(CatalogConstants.CSVFILE_SERDE,
+        String serdeClass = this.meta.getOption(StorageConstants.CSVFILE_SERDE,
             TextSerializerDeserializer.class.getName());
         serde = (SerializerDeserializer) Class.forName(serdeClass).newInstance();
       } catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/tajo/blob/1d24a25a/tajo-storage/src/main/java/org/apache/tajo/storage/StorageConstants.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/StorageConstants.java b/tajo-storage/src/main/java/org/apache/tajo/storage/StorageConstants.java
new file mode 100644
index 0000000..414fa35
--- /dev/null
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/StorageConstants.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.storage;
+
+import parquet.hadoop.ParquetWriter;
+import parquet.hadoop.metadata.CompressionCodecName;
+
+public class StorageConstants {
+  // table options
+  public static final String COMPRESSION_CODEC = "compression.codec";
+  public static final String COMPRESSION_TYPE = "compression.type";
+
+  public static final String CSVFILE_DELIMITER = "csvfile.delimiter";
+  public static final String CSVFILE_NULL = "csvfile.null";
+  public static final String CSVFILE_SERDE = "csvfile.serde";
+
+
+  public static final String SEQUENCEFILE_DELIMITER = "sequencefile.delimiter";
+  public static final String SEQUENCEFILE_NULL = "sequencefile.null";
+  public static final String SEQUENCEFILE_SERDE = "sequencefile.serde";
+
+  public static final String RCFILE_NULL = "rcfile.null";
+  public static final String RCFILE_SERDE = "rcfile.serde";
+
+  public static final String DEFAULT_FIELD_DELIMITER = "|";
+  public static final String DEFAULT_BINARY_SERDE = BinarySerializerDeserializer.class.getName();
+  public static final String DEFAULT_TEXT_SERDE = TextSerializerDeserializer.class.getName();
+
+  public static final String PARQUET_DEFAULT_BLOCK_SIZE;
+  public static final String PARQUET_DEFAULT_PAGE_SIZE;
+  public static final String PARQUET_DEFAULT_COMPRESSION_CODEC_NAME;
+  public static final String PARQUET_DEFAULT_IS_DICTIONARY_ENABLED;
+  public static final String PARQUET_DEFAULT_IS_VALIDATION_ENABLED;
+
+  static {
+    PARQUET_DEFAULT_BLOCK_SIZE =
+        Integer.toString(ParquetWriter.DEFAULT_BLOCK_SIZE);
+    PARQUET_DEFAULT_PAGE_SIZE =
+        Integer.toString(ParquetWriter.DEFAULT_PAGE_SIZE);
+
+    // When parquet-hadoop 1.3.3 is available, this should be changed to
+    // ParquetWriter.DEFAULT_COMPRESSION_CODEC_NAME.
+    PARQUET_DEFAULT_COMPRESSION_CODEC_NAME =
+        CompressionCodecName.UNCOMPRESSED.name().toLowerCase();
+
+    // When parquet-hadoop 1.3.3 is available, this should be changed to
+    // ParquetWriter.DEFAULT_IS_DICTIONARY_ENABLED.
+    PARQUET_DEFAULT_IS_DICTIONARY_ENABLED = "true";
+
+    // When parquet-hadoop 1.3.3 is available, this should be changed to
+    // ParquetWriter.DEFAULT_IS_VALIDATING_ENABLED.
+    PARQUET_DEFAULT_IS_VALIDATION_ENABLED = "false";
+  }
+}

http://git-wip-us.apache.org/repos/asf/tajo/blob/1d24a25a/tajo-storage/src/main/java/org/apache/tajo/storage/StorageUtil.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/StorageUtil.java b/tajo-storage/src/main/java/org/apache/tajo/storage/StorageUtil.java
index 1f13946..2acae5a 100644
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/StorageUtil.java
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/StorageUtil.java
@@ -22,14 +22,14 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.tajo.catalog.Column;
-import org.apache.tajo.catalog.Schema;
-import org.apache.tajo.catalog.TableMeta;
+import org.apache.tajo.catalog.*;
+import org.apache.tajo.catalog.proto.CatalogProtos;
 import org.apache.tajo.util.FileUtil;
+import parquet.hadoop.ParquetOutputFormat;
 
 import java.io.IOException;
 
-public class StorageUtil {
+public class StorageUtil extends StorageConstants{
   public static int getRowByteSize(Schema schema) {
     int sum = 0;
     for(Column col : schema.getColumns()) {
@@ -99,4 +99,24 @@ public class StorageUtil {
     
     return new Path(parent, sb.toString());
   }
+
+  public static Options newPhysicalProperties(CatalogProtos.StoreType type) {
+    Options options = new Options();
+    if (CatalogProtos.StoreType.CSV == type) {
+      options.put(CSVFILE_DELIMITER, DEFAULT_FIELD_DELIMITER);
+    } else if (CatalogProtos.StoreType.RCFILE == type) {
+      options.put(RCFILE_SERDE, DEFAULT_BINARY_SERDE);
+    } else if (CatalogProtos.StoreType.SEQUENCEFILE == type) {
+      options.put(SEQUENCEFILE_SERDE, DEFAULT_TEXT_SERDE);
+      options.put(SEQUENCEFILE_DELIMITER, DEFAULT_FIELD_DELIMITER);
+    } else if (type == CatalogProtos.StoreType.PARQUET) {
+      options.put(ParquetOutputFormat.BLOCK_SIZE, PARQUET_DEFAULT_BLOCK_SIZE);
+      options.put(ParquetOutputFormat.PAGE_SIZE, PARQUET_DEFAULT_PAGE_SIZE);
+      options.put(ParquetOutputFormat.COMPRESSION, PARQUET_DEFAULT_COMPRESSION_CODEC_NAME);
+      options.put(ParquetOutputFormat.ENABLE_DICTIONARY, PARQUET_DEFAULT_IS_DICTIONARY_ENABLED);
+      options.put(ParquetOutputFormat.VALIDATION, PARQUET_DEFAULT_IS_VALIDATION_ENABLED);
+    }
+
+    return options;
+  }
 }

http://git-wip-us.apache.org/repos/asf/tajo/blob/1d24a25a/tajo-storage/src/main/java/org/apache/tajo/storage/rcfile/RCFile.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/rcfile/RCFile.java b/tajo-storage/src/main/java/org/apache/tajo/storage/rcfile/RCFile.java
index 1beea99..1da41e4 100644
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/rcfile/RCFile.java
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/rcfile/RCFile.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.io.*;
 import org.apache.hadoop.io.SequenceFile.Metadata;
 import org.apache.hadoop.io.compress.*;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.tajo.catalog.CatalogConstants;
 import org.apache.tajo.catalog.Column;
 import org.apache.tajo.catalog.Schema;
 import org.apache.tajo.catalog.TableMeta;
@@ -42,8 +41,8 @@ import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
 import org.apache.tajo.util.Bytes;
 
-import java.io.*;
 import java.io.Closeable;
+import java.io.*;
 import java.rmi.server.UID;
 import java.security.MessageDigest;
 import java.util.Arrays;
@@ -735,7 +734,7 @@ public class RCFile {
         isShuffle = false;
       }
 
-      String codecClassname = this.meta.getOption(CatalogConstants.COMPRESSION_CODEC);
+      String codecClassname = this.meta.getOption(StorageConstants.COMPRESSION_CODEC);
       if (!StringUtils.isEmpty(codecClassname)) {
         try {
           Class<? extends CompressionCodec> codecClass = conf.getClassByName(
@@ -747,7 +746,7 @@ public class RCFile {
         }
       }
 
-      String nullCharacters = StringEscapeUtils.unescapeJava(this.meta.getOption(CatalogConstants.RCFILE_NULL));
+      String nullCharacters = StringEscapeUtils.unescapeJava(this.meta.getOption(StorageConstants.RCFILE_NULL));
       if (StringUtils.isEmpty(nullCharacters)) {
         nullChars = NullDatum.get().asTextBytes();
       } else {
@@ -760,7 +759,7 @@ public class RCFile {
 
       metadata.set(new Text(COLUMN_NUMBER_METADATA_STR), new Text("" + columnNumber));
 
-      String serdeClass = this.meta.getOption(CatalogConstants.RCFILE_SERDE,
+      String serdeClass = this.meta.getOption(StorageConstants.RCFILE_SERDE,
           BinarySerializerDeserializer.class.getName());
       try {
         serde = (SerializerDeserializer) Class.forName(serdeClass).newInstance();
@@ -768,7 +767,7 @@ public class RCFile {
         LOG.error(e.getMessage(), e);
         throw new IOException(e);
       }
-      metadata.set(new Text(CatalogConstants.RCFILE_SERDE), new Text(serdeClass));
+      metadata.set(new Text(StorageConstants.RCFILE_SERDE), new Text(serdeClass));
 
       columnBuffers = new ColumnBuffer[columnNumber];
       for (int i = 0; i < columnNumber; i++) {
@@ -1195,7 +1194,7 @@ public class RCFile {
       rowId = new LongWritable();
       readBytes = 0;
 
-      String nullCharacters = StringEscapeUtils.unescapeJava(meta.getOption(CatalogConstants.RCFILE_NULL));
+      String nullCharacters = StringEscapeUtils.unescapeJava(meta.getOption(StorageConstants.RCFILE_NULL));
       if (StringUtils.isEmpty(nullCharacters)) {
         nullChars = NullDatum.get().asTextBytes();
       } else {
@@ -1364,14 +1363,14 @@ public class RCFile {
       metadata = new Metadata();
       metadata.readFields(in);
 
-      Text text = metadata.get(new Text(CatalogConstants.RCFILE_SERDE));
+      Text text = metadata.get(new Text(StorageConstants.RCFILE_SERDE));
 
       try {
         String serdeClass;
         if(text != null && !text.toString().isEmpty()){
           serdeClass = text.toString();
         } else{
-          serdeClass = this.meta.getOption(CatalogConstants.RCFILE_SERDE, BinarySerializerDeserializer.class.getName());
+          serdeClass = this.meta.getOption(StorageConstants.RCFILE_SERDE, BinarySerializerDeserializer.class.getName());
         }
         serde = (SerializerDeserializer) Class.forName(serdeClass).newInstance();
       } catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/tajo/blob/1d24a25a/tajo-storage/src/main/java/org/apache/tajo/storage/sequencefile/SequenceFileAppender.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/sequencefile/SequenceFileAppender.java b/tajo-storage/src/main/java/org/apache/tajo/storage/sequencefile/SequenceFileAppender.java
index 8092c0c..9eb1b2d 100644
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/sequencefile/SequenceFileAppender.java
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/sequencefile/SequenceFileAppender.java
@@ -32,7 +32,6 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionCodecFactory;
-import org.apache.tajo.catalog.CatalogConstants;
 import org.apache.tajo.catalog.Schema;
 import org.apache.tajo.catalog.TableMeta;
 import org.apache.tajo.catalog.proto.CatalogProtos;
@@ -97,9 +96,10 @@ public class SequenceFileAppender extends FileAppender {
       isShuffle = false;
     }
 
-    this.delimiter = StringEscapeUtils.unescapeJava(this.meta.getOption(CatalogConstants.SEQUENCEFILE_DELIMITER, CatalogConstants.DEFAULT_FIELD_DELIMITER)).charAt(0);
+    this.delimiter = StringEscapeUtils.unescapeJava(this.meta.getOption(StorageConstants.SEQUENCEFILE_DELIMITER,
+        StorageConstants.DEFAULT_FIELD_DELIMITER)).charAt(0);
     this.columnNum = schema.size();
-    String nullCharacters = StringEscapeUtils.unescapeJava(this.meta.getOption(CatalogConstants.SEQUENCEFILE_NULL));
+    String nullCharacters = StringEscapeUtils.unescapeJava(this.meta.getOption(StorageConstants.SEQUENCEFILE_NULL));
     if (StringUtils.isEmpty(nullCharacters)) {
       nullChars = NullDatum.get().asTextBytes();
     } else {
@@ -110,7 +110,7 @@ public class SequenceFileAppender extends FileAppender {
       throw new FileNotFoundException(path.toString());
     }
 
-    String codecName = this.meta.getOption(CatalogConstants.COMPRESSION_CODEC);
+    String codecName = this.meta.getOption(StorageConstants.COMPRESSION_CODEC);
     if(!StringUtils.isEmpty(codecName)){
       codecFactory = new CompressionCodecFactory(conf);
       codec = codecFactory.getCodecByClassName(codecName);
@@ -121,7 +121,7 @@ public class SequenceFileAppender extends FileAppender {
     }
 
     try {
-      String serdeClass = this.meta.getOption(CatalogConstants.SEQUENCEFILE_SERDE, TextSerializerDeserializer.class.getName());
+      String serdeClass = this.meta.getOption(StorageConstants.SEQUENCEFILE_SERDE, TextSerializerDeserializer.class.getName());
       serde = (SerializerDeserializer) Class.forName(serdeClass).newInstance();
     } catch (Exception e) {
       LOG.error(e.getMessage(), e);
@@ -135,7 +135,7 @@ public class SequenceFileAppender extends FileAppender {
       valueClass = Text.class;
     }
 
-    String type = this.meta.getOption(CatalogConstants.COMPRESSION_TYPE, CompressionType.NONE.name());
+    String type = this.meta.getOption(StorageConstants.COMPRESSION_TYPE, CompressionType.NONE.name());
     if (type.equals(CompressionType.BLOCK.name())) {
      writer = SequenceFile.createWriter(fs, conf, path, BytesWritable.class, valueClass, CompressionType.BLOCK, codec);
     } else if (type.equals(CompressionType.RECORD.name())) {

http://git-wip-us.apache.org/repos/asf/tajo/blob/1d24a25a/tajo-storage/src/main/java/org/apache/tajo/storage/sequencefile/SequenceFileScanner.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/sequencefile/SequenceFileScanner.java b/tajo-storage/src/main/java/org/apache/tajo/storage/sequencefile/SequenceFileScanner.java
index c5720e3..ccf3d9e 100644
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/sequencefile/SequenceFileScanner.java
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/sequencefile/SequenceFileScanner.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableUtils;
-import org.apache.tajo.catalog.CatalogConstants;
 import org.apache.tajo.catalog.Column;
 import org.apache.tajo.catalog.Schema;
 import org.apache.tajo.catalog.TableMeta;
@@ -88,14 +87,14 @@ public class SequenceFileScanner extends FileScanner {
 
     reader = new SequenceFile.Reader(fs, fragment.getPath(), conf);
 
-    String nullCharacters = StringEscapeUtils.unescapeJava(this.meta.getOption(CatalogConstants.SEQUENCEFILE_NULL));
+    String nullCharacters = StringEscapeUtils.unescapeJava(this.meta.getOption(StorageConstants.SEQUENCEFILE_NULL));
     if (StringUtils.isEmpty(nullCharacters)) {
       nullChars = NullDatum.get().asTextBytes();
     } else {
       nullChars = nullCharacters.getBytes();
     }
 
-    String delim  = meta.getOption(CatalogConstants.SEQUENCEFILE_DELIMITER, CatalogConstants.DEFAULT_FIELD_DELIMITER);
+    String delim  = meta.getOption(StorageConstants.SEQUENCEFILE_DELIMITER, StorageConstants.DEFAULT_FIELD_DELIMITER);
     this.delimiter = StringEscapeUtils.unescapeJava(delim).charAt(0);
 
     this.start = fragment.getStartKey();
@@ -118,7 +117,7 @@ public class SequenceFileScanner extends FileScanner {
     prepareProjection(targets);
 
     try {
-      String serdeClass = this.meta.getOption(CatalogConstants.SEQUENCEFILE_SERDE, TextSerializerDeserializer.class.getName());
+      String serdeClass = this.meta.getOption(StorageConstants.SEQUENCEFILE_SERDE, TextSerializerDeserializer.class.getName());
       serde = (SerializerDeserializer) Class.forName(serdeClass).newInstance();
 
       if (serde instanceof BinarySerializerDeserializer)

http://git-wip-us.apache.org/repos/asf/tajo/blob/1d24a25a/tajo-storage/src/test/java/org/apache/tajo/storage/TestMergeScanner.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/test/java/org/apache/tajo/storage/TestMergeScanner.java b/tajo-storage/src/test/java/org/apache/tajo/storage/TestMergeScanner.java
index 354fbc2..d50c356 100644
--- a/tajo-storage/src/test/java/org/apache/tajo/storage/TestMergeScanner.java
+++ b/tajo-storage/src/test/java/org/apache/tajo/storage/TestMergeScanner.java
@@ -94,7 +94,7 @@ public class TestMergeScanner {
 
     Options options = new Options();
     TableMeta meta = CatalogUtil.newTableMeta(storeType, options);
-    meta.setOptions(CatalogUtil.newOptionsWithDefault(storeType));
+    meta.setOptions(StorageUtil.newPhysicalProperties(storeType));
 
     Path table1Path = new Path(testDir, storeType + "_1.data");
    Appender appender1 = StorageManagerFactory.getStorageManager(conf).getAppender(meta, schema, table1Path);

http://git-wip-us.apache.org/repos/asf/tajo/blob/1d24a25a/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorageManager.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorageManager.java b/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorageManager.java
index be8b6de..8d1d0b3 100644
--- a/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorageManager.java
+++ b/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorageManager.java
@@ -110,7 +110,7 @@ public class TestStorageManager {
     final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
         .numDataNodes(1).build();
 
-    int testCount = 100;
+    int testCount = 10;
     Path tablePath = new Path("/testGetSplit");
     try {
       DistributedFileSystem fs = cluster.getFileSystem();
@@ -165,7 +165,7 @@ public class TestStorageManager {
     final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
         .numDataNodes(2).build();
 
-    int testCount = 100;
+    int testCount = 10;
     Path tablePath = new Path("/testGetSplitWithBlockStorageLocationsBatching");
     try {
       DistributedFileSystem fs = cluster.getFileSystem();

http://git-wip-us.apache.org/repos/asf/tajo/blob/1d24a25a/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorages.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorages.java b/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorages.java
index a500f09..8a700ac 100644
--- a/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorages.java
+++ b/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorages.java
@@ -146,7 +146,7 @@ public class TestStorages {
     schema.addColumn("score", Type.FLOAT4);
 
     TableMeta meta = CatalogUtil.newTableMeta(storeType);
-    meta.setOptions(CatalogUtil.newOptionsWithDefault(storeType));
+    meta.setOptions(StorageUtil.newPhysicalProperties(storeType));
 
     Path tablePath = new Path(testDir, "testProjection.data");
    Appender appender = StorageManagerFactory.getStorageManager(conf).getAppender(meta, schema, tablePath);
@@ -209,7 +209,7 @@ public class TestStorages {
 
     Options options = new Options();
     TableMeta meta = CatalogUtil.newTableMeta(storeType, options);
-    meta.setOptions(CatalogUtil.newOptionsWithDefault(storeType));
+    meta.setOptions(StorageUtil.newPhysicalProperties(storeType));
 
     Path tablePath = new Path(testDir, "testVariousTypes.data");
     Appender appender = StorageManagerFactory.getStorageManager(conf).getAppender(meta, schema, tablePath);
@@ -271,11 +271,11 @@ public class TestStorages {
 
     Options options = new Options();
     TableMeta meta = CatalogUtil.newTableMeta(storeType, options);
-    meta.setOptions(CatalogUtil.newOptionsWithDefault(storeType));
-    meta.putOption(CatalogConstants.CSVFILE_NULL, "\\\\N");
-    meta.putOption(CatalogConstants.RCFILE_NULL, "\\\\N");
-    meta.putOption(CatalogConstants.RCFILE_SERDE, TextSerializerDeserializer.class.getName());
-    meta.putOption(CatalogConstants.SEQUENCEFILE_NULL, "\\");
+    meta.setOptions(StorageUtil.newPhysicalProperties(storeType));
+    meta.putOption(StorageConstants.CSVFILE_NULL, "\\\\N");
+    meta.putOption(StorageConstants.RCFILE_NULL, "\\\\N");
+    meta.putOption(StorageConstants.RCFILE_SERDE, TextSerializerDeserializer.class.getName());
+    meta.putOption(StorageConstants.SEQUENCEFILE_NULL, "\\");
 
     Path tablePath = new Path(testDir, "testVariousTypes.data");
     Appender appender = StorageManagerFactory.getStorageManager(conf).getAppender(meta, schema, tablePath);
@@ -360,7 +360,7 @@ public class TestStorages {
 
     Options options = new Options();
     TableMeta meta = CatalogUtil.newTableMeta(storeType, options);
-    meta.putOption(CatalogConstants.CSVFILE_SERDE, TextSerializerDeserializer.class.getName());
+    meta.putOption(StorageConstants.CSVFILE_SERDE, TextSerializerDeserializer.class.getName());
 
     Path tablePath = new Path(testDir, "testVariousTypes.data");
     Appender appender = StorageManagerFactory.getStorageManager(conf).getAppender(meta, schema, tablePath);
@@ -429,7 +429,7 @@ public class TestStorages {
 
     Options options = new Options();
     TableMeta meta = CatalogUtil.newTableMeta(storeType, options);
-    meta.putOption(CatalogConstants.RCFILE_SERDE, BinarySerializerDeserializer.class.getName());
+    meta.putOption(StorageConstants.RCFILE_SERDE, BinarySerializerDeserializer.class.getName());
 
     Path tablePath = new Path(testDir, "testVariousTypes.data");
     Appender appender = StorageManagerFactory.getStorageManager(conf).getAppender(meta, schema, tablePath);
@@ -498,7 +498,7 @@ public class TestStorages {
 
     Options options = new Options();
     TableMeta meta = CatalogUtil.newTableMeta(storeType, options);
-    meta.putOption(CatalogConstants.SEQUENCEFILE_SERDE, TextSerializerDeserializer.class.getName());
+    meta.putOption(StorageConstants.SEQUENCEFILE_SERDE, TextSerializerDeserializer.class.getName());
 
     Path tablePath = new Path(testDir, "testVariousTypes.data");
     Appender appender = StorageManagerFactory.getStorageManager(conf).getAppender(meta, schema, tablePath);
@@ -567,7 +567,7 @@ public class TestStorages {
 
     Options options = new Options();
     TableMeta meta = CatalogUtil.newTableMeta(storeType, options);
-    meta.putOption(CatalogConstants.SEQUENCEFILE_SERDE, BinarySerializerDeserializer.class.getName());
+    meta.putOption(StorageConstants.SEQUENCEFILE_SERDE, BinarySerializerDeserializer.class.getName());
 
     Path tablePath = new Path(testDir, "testVariousTypes.data");
     Appender appender = StorageManagerFactory.getStorageManager(conf).getAppender(meta, schema, tablePath);

http://git-wip-us.apache.org/repos/asf/tajo/blob/1d24a25a/tajo-storage/src/test/java/org/apache/tajo/storage/v2/TestStorages.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/test/java/org/apache/tajo/storage/v2/TestStorages.java b/tajo-storage/src/test/java/org/apache/tajo/storage/v2/TestStorages.java
index 140aa09..be5b096 100644
--- a/tajo-storage/src/test/java/org/apache/tajo/storage/v2/TestStorages.java
+++ b/tajo-storage/src/test/java/org/apache/tajo/storage/v2/TestStorages.java
@@ -95,7 +95,7 @@ public class TestStorages {
       schema.addColumn("age", Type.INT8);
 
       TableMeta meta = CatalogUtil.newTableMeta(storeType);
-      meta.setOptions(CatalogUtil.newOptionsWithDefault(storeType));
+      meta.setOptions(StorageUtil.newPhysicalProperties(storeType));
       Path tablePath = new Path(testDir, "Splitable.data");
       Appender appender = StorageManagerFactory.getStorageManager(conf).getAppender(meta, schema, tablePath);
       appender.enableStats();
@@ -148,7 +148,7 @@ public class TestStorages {
     schema.addColumn("score", Type.FLOAT4);
 
     TableMeta meta = CatalogUtil.newTableMeta(storeType);
-    meta.setOptions(CatalogUtil.newOptionsWithDefault(storeType));
+    meta.setOptions(StorageUtil.newPhysicalProperties(storeType));
 
     Path tablePath = new Path(testDir, "testProjection.data");
     Appender appender = StorageManagerFactory.getStorageManager(conf).getAppender(meta, schema, tablePath);
@@ -209,7 +209,7 @@ public class TestStorages {
 
     Options options = new Options();
     TableMeta meta = CatalogUtil.newTableMeta(storeType, options);
-    meta.setOptions(CatalogUtil.newOptionsWithDefault(storeType));
+    meta.setOptions(StorageUtil.newPhysicalProperties(storeType));
 
     Path tablePath = new Path(testDir, "testVariousTypes.data");
     Appender appender = StorageManagerFactory.getStorageManager(conf).getAppender(meta, schema, tablePath);


Mime
View raw message