hive-commits mailing list archives

From amareshw...@apache.org
Subject svn commit: r1456360 [1/2] - in /hive/branches/HIVE-4115/ql/src: java/org/apache/hadoop/hive/ql/cube/ java/org/apache/hadoop/hive/ql/cube/metadata/ test/org/apache/hadoop/hive/ql/cube/ test/org/apache/hadoop/hive/ql/cube/metadata/
Date Thu, 14 Mar 2013 08:09:31 GMT
Author: amareshwari
Date: Thu Mar 14 08:09:31 2013
New Revision: 1456360

URL: http://svn.apache.org/r1456360
Log:
Add cube metastore: metadata model classes (cubes, measures, dimensions, fact and dimension tables, storages) and a CubeMetastoreClient wrapper around the Hive metastore, with a client test.

Added:
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/AbstractCubeTable.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/BaseDimension.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ColumnMeasure.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Cube.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimension.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimensionTable.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMeasure.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeTableType.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ExprMeasure.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HDFSStorage.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/InlineDimension.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/MetastoreConstants.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/MetastoreUtil.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Named.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ReferencedDimension.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/StorageConstants.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/TableReference.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/metadata/
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/metadata/TestCubeMetastoreClient.java
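
For reviewers, a minimal usage sketch of the new client API (illustrative
only, not part of this commit; the class name, cube name and column names
below are hypothetical):

    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.api.FieldSchema;
    import org.apache.hadoop.hive.ql.cube.metadata.*;
    import org.apache.hadoop.hive.ql.metadata.HiveException;

    public class CubeClientExample {
      public static void main(String[] args) throws HiveException {
        HiveConf conf = new HiveConf();
        CubeMetastoreClient client = CubeMetastoreClient.getInstance(conf);

        // A measure is a named, typed column with optional
        // aggregate/unit/format metadata.
        Set<CubeMeasure> measures = new HashSet<CubeMeasure>();
        measures.add(new ColumnMeasure(
            new FieldSchema("msr1", "int", "first measure")));

        // A cube dimension is a named hierarchy of base dimensions.
        List<BaseDimension> hierarchy = new ArrayList<BaseDimension>();
        hierarchy.add(new BaseDimension(
            new FieldSchema("zipcode", "int", "zip code")));
        Set<CubeDimension> dimensions = new HashSet<CubeDimension>();
        dimensions.add(new CubeDimension("location", hierarchy));

        // The cube is persisted as a managed Hive table whose table
        // properties carry the serialized measures and dimensions.
        client.createCube("testcube", measures, dimensions);
      }
    }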

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/AbstractCubeTable.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/AbstractCubeTable.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/AbstractCubeTable.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/AbstractCubeTable.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,76 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+public abstract class AbstractCubeTable implements Named {
+  private final String name;
+  private final List<FieldSchema> columns;
+  private final Map<String, String> properties = new HashMap<String, String>();
+
+  protected AbstractCubeTable(String name, List<FieldSchema> columns,
+      Map<String, String> props) {
+    this.name = name;
+    this.columns = columns;
+    this.properties.putAll(props);
+  }
+
+  protected AbstractCubeTable(Table hiveTable) {
+    this.name = hiveTable.getTableName();
+    this.columns = hiveTable.getCols();
+    this.properties.putAll(hiveTable.getParameters());
+  }
+
+  public abstract CubeTableType getTableType();
+
+  public abstract Set<String> getStorages();
+
+  public Map<String, String> getProperties() {
+    return properties;
+  }
+
+  protected void addProperties() {
+    properties.put(MetastoreConstants.TABLE_TYPE_KEY, getTableType().name());
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public List<FieldSchema> getColumns() {
+    return columns;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null) {
+      return false;
+    }
+    if (getClass() != obj.getClass()) {
+      return false;
+    }
+    AbstractCubeTable other = (AbstractCubeTable) obj;
+
+    if (!this.getName().equalsIgnoreCase(other.getName())) {
+      return false;
+    }
+    if (this.getColumns() == null) {
+      if (other.getColumns() != null) {
+        return false;
+      }
+    } else {
+      if (!this.getColumns().equals(other.getColumns())) {
+        return false;
+      }
+    }
+    return true;
+  }
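+
+  // NOTE: equals() compares table names case-insensitively, so hashCode()
+  // hashes the lowercased name to preserve the equals/hashCode contract.
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + ((getName() == null) ? 0 :
+      getName().toLowerCase().hashCode());
+    result = prime * result + ((getColumns() == null) ? 0 :
+      getColumns().hashCode());
+    return result;
+  }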
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/BaseDimension.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/BaseDimension.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/BaseDimension.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/BaseDimension.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,79 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+
+public class BaseDimension {
+  private final FieldSchema column;
+
+  public BaseDimension(FieldSchema column) {
+    this.column = column;
+    assert (column != null);
+    assert (column.getName() != null);
+    assert (column.getType() != null);
+  }
+
+  public FieldSchema getColumn() {
+    return column;
+  }
+
+  public String getName() {
+    return column.getName();
+  }
+
+  public String getType() {
+    return column.getType();
+  }
+
+  public void addProperties(Map<String, String> props) {
+    props.put(MetastoreUtil.getDimTypePropertyKey(column.getName()),
+        column.getType());
+  }
+
+  public BaseDimension(String name, Map<String, String> props) {
+    String type = getDimType(name, props);
+    this.column = new FieldSchema(name, type, "");
+  }
+
+  public static String getDimType(String name, Map<String, String> props) {
+    return props.get(MetastoreUtil.getDimTypePropertyKey(name));
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + ((getName() == null) ? 0 :
+      getName().toLowerCase().hashCode());
+    result = prime * result + ((getType() == null) ? 0 :
+      getType().toLowerCase().hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (obj == null || getClass() != obj.getClass()) {
+      return false;
+    }
+    BaseDimension other = (BaseDimension) obj;
+    if (this.getName() == null) {
+      if (other.getName() != null) {
+        return false;
+      }
+    } else if (!this.getName().equalsIgnoreCase(other.getName())) {
+      return false;
+    }
+    if (this.getType() == null) {
+      if (other.getType() != null) {
+        return false;
+      }
+    } else if (!this.getType().equalsIgnoreCase(other.getType())) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public String toString() {
+    String str = getName() + ":" + getType();
+    return str;
+  }
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ColumnMeasure.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ColumnMeasure.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ColumnMeasure.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ColumnMeasure.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,41 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+
+public final class ColumnMeasure extends CubeMeasure {
+  public ColumnMeasure(FieldSchema column, String formatString,
+      String aggregate, String unit) {
+    super(column, formatString, aggregate, unit);
+  }
+
+  public ColumnMeasure(FieldSchema column) {
+    this(column, null, null, null);
+  }
+
+  public ColumnMeasure(String name, Map<String, String> props) {
+    super(name, props);
+  }
+
+  @Override
+  public void addProperties(Map<String, String> props) {
+    super.addProperties(props);
+    props.put(MetastoreUtil.getMeasureClassPropertyKey(getName()),
+        this.getClass().getCanonicalName());
+  }
+
+  @Override
+  public int hashCode() {
+    return super.hashCode();
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    return super.equals(obj);
+  }
+
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Cube.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Cube.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Cube.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Cube.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,149 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+public final class Cube extends AbstractCubeTable {
+  private final Set<CubeMeasure> measures;
+  private final Set<CubeDimension> dimensions;
+  private static final List<FieldSchema> columns = new ArrayList<FieldSchema>();
+  static {
+    columns.add(new FieldSchema("dummy", "string", "dummy column"));
+  }
+
+  public Cube(String name, Set<CubeMeasure> measures,
+      Set<CubeDimension> dimensions) {
+    super(name, columns, new HashMap<String, String>());
+    this.measures = measures;
+    this.dimensions = dimensions;
+    addProperties();
+  }
+
+  public Cube(Table tbl) {
+    super(tbl);
+    this.measures = getMeasures(getName(), getProperties());
+    this.dimensions = getDimensions(getName(), getProperties());
+  }
+
+  public Set<CubeMeasure> getMeasures() {
+    return measures;
+  }
+
+  public Set<CubeDimension> getDimensions() {
+    return dimensions;
+  }
+
+  @Override
+  public CubeTableType getTableType() {
+    return CubeTableType.CUBE;
+  }
+
+  @Override
+  public Set<String> getStorages() {
+    return null;
+  }
+
+  @Override
+  protected void addProperties() {
+    super.addProperties();
+    getProperties().put(MetastoreUtil.getCubeMeasureListKey(getName()),
+        MetastoreUtil.getNamedStr(measures));
+    addMeasures(getProperties(), measures);
+    getProperties().put(MetastoreUtil.getCubeDimensionListKey(getName()),
+        MetastoreUtil.getNamedStr(dimensions));
+    addDimensions(getProperties(), dimensions);
+  }
+
+  public static void addMeasures(Map<String, String> props,
+      Set<CubeMeasure> measures) {
+    for (CubeMeasure measure : measures) {
+      measure.addProperties(props);
+    }
+  }
+
+  public static void addDimensions(Map<String, String> props,
+      Set<CubeDimension> dimensions) {
+    for (CubeDimension dimension : dimensions) {
+      dimension.addProperties(props);
+    }
+  }
+
+  public static Set<CubeMeasure> getMeasures(String name,
+      Map<String, String> props) {
+    Set<CubeMeasure> measures = new HashSet<CubeMeasure>();
+    String measureStr = props.get(MetastoreUtil.getCubeMeasureListKey(name));
+    String[] names = measureStr.split(",");
+    for (String measureName : names) {
+      String className = props.get(MetastoreUtil.getMeasureClassPropertyKey(
+          measureName));
+      CubeMeasure measure;
+      try {
+        Class<?> clazz = Class.forName(className);
+        Constructor<?> constructor =
+            clazz.getConstructor(String.class, Map.class);
+        measure = (CubeMeasure) constructor.newInstance(new Object[]
+           {measureName, props});
+      } catch (ClassNotFoundException e) {
+        throw new IllegalArgumentException("Invalid measure", e);
+      } catch (SecurityException e) {
+        throw new IllegalArgumentException("Invalid measure", e);
+      } catch (NoSuchMethodException e) {
+        throw new IllegalArgumentException("Invalid measure", e);
+      } catch (IllegalArgumentException e) {
+        throw new IllegalArgumentException("Invalid measure", e);
+      } catch (InstantiationException e) {
+        throw new IllegalArgumentException("Invalid measure", e);
+      } catch (IllegalAccessException e) {
+        throw new IllegalArgumentException("Invalid measure", e);
+      } catch (InvocationTargetException e) {
+        throw new IllegalArgumentException("Invalid measure", e);
+      }
+      measures.add(measure);
+    }
+    return measures;
+  }
+
+  public static Set<CubeDimension> getDimensions(String name,
+      Map<String, String> props) {
+    Set<CubeDimension> dimensions = new HashSet<CubeDimension>();
+    String dimStr = props.get(MetastoreUtil.getCubeDimensionListKey(name));
+    String[] names = dimStr.split(",");
+    for (String dimName : names) {
+      CubeDimension dim = new CubeDimension(dimName, props);
+      dimensions.add(dim);
+    }
+    return dimensions;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!super.equals(obj)) {
+      return false;
+    }
+    Cube other = (Cube) obj;
+    if (this.getMeasures() == null) {
+      if (other.getMeasures() != null) {
+        return false;
+      }
+    } else if (!this.getMeasures().equals(other.getMeasures())) {
+      return false;
+    }
+    if (this.getDimensions() == null) {
+      if (other.getDimensions() != null) {
+        return false;
+      }
+    } else if (!this.getDimensions().equals(other.getDimensions())) {
+      return false;
+    }
+    return true;
+  }
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimension.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimension.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimension.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimension.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,127 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class CubeDimension implements Named {
+  private final String name;
+  private final List<BaseDimension> hierarchy;
+
+  public CubeDimension(String name, List<BaseDimension> hierarchy) {
+    this.name = name;
+    this.hierarchy = hierarchy;
+    assert (name != null);
+    assert (hierarchy != null);
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public List<BaseDimension> getHierarchy() {
+    return hierarchy;
+  }
+
+  public void addProperties(Map<String, String> props) {
+    for (int i = 0; i < hierarchy.size(); i++) {
+      BaseDimension dim = hierarchy.get(i);
+      props.put(MetastoreUtil.getHierachyElementKeyName(name, i),
+          getHierarchyElement(dim));
+      dim.addProperties(props);
+    }
+  }
+
+  public static String getHierarchyElement(BaseDimension dim) {
+    return dim.getName() + "," + dim.getClass().getCanonicalName();
+  }
+
+  public CubeDimension(String name, Map<String, String> props) {
+    this.name = name;
+    this.hierarchy = getHiearachy(name, props);
+  }
+
+  public static List<BaseDimension> getHiearachy(String name,
+      Map<String, String> props) {
+    Map<Integer, String> hierarchyElements = new HashMap<Integer, String>();
+    for (String param : props.keySet()) {
+      if (param.startsWith(MetastoreUtil.getHierachyElementKeyPFX(name))) {
+        hierarchyElements.put(MetastoreUtil.getHierachyElementIndex(name, param),
+            props.get(param));
+      }
+    }
+    List<BaseDimension> hierarchy = new ArrayList<BaseDimension>(
+        hierarchyElements.size());
+    for (int i = 0; i < hierarchyElements.size(); i++) {
+      String hierarchyElement = hierarchyElements.get(i);
+      String[] elements = hierarchyElement.split(",");
+      String dimName = elements[0];
+      String className = elements[1];
+      BaseDimension dim;
+      try {
+        Class<?> clazz = Class.forName(className);
+        Constructor<?> constructor =
+            clazz.getConstructor(String.class, Map.class);
+        dim = (BaseDimension) constructor.newInstance(new Object[]
+            {dimName, props});
+      } catch (ClassNotFoundException e) {
+        throw new IllegalArgumentException("Invalid Dimension", e);
+      } catch (SecurityException e) {
+        throw new IllegalArgumentException("Invalid Dimension", e);
+      } catch (NoSuchMethodException e) {
+        throw new IllegalArgumentException("Invalid Dimension", e);
+      } catch (IllegalArgumentException e) {
+        throw new IllegalArgumentException("Invalid Dimension", e);
+      } catch (InstantiationException e) {
+        throw new IllegalArgumentException("Invalid Dimension", e);
+      } catch (IllegalAccessException e) {
+        throw new IllegalArgumentException("Invalid Dimension", e);
+      } catch (InvocationTargetException e) {
+        throw new IllegalArgumentException("Invalid Dimension", e);
+      }
+      hierarchy.add(dim);
+    }
+    return hierarchy;
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + ((getName() == null) ? 0 :
+      getName().toLowerCase().hashCode());
+    result = prime * result + ((getHierarchy() == null) ? 0 :
+      getHierarchy().hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (obj == null || getClass() != obj.getClass()) {
+      return false;
+    }
+    CubeDimension other = (CubeDimension) obj;
+    if (this.getName() == null) {
+      if (other.getName() != null) {
+        return false;
+      }
+    } else if (!this.getName().equalsIgnoreCase(other.getName())) {
+      return false;
+    }
+    if (this.getHierarchy() == null) {
+      if (other.getHierarchy() != null) {
+        return false;
+      }
+    } else if (!this.getHierarchy().equals(other.getHierarchy())) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public String toString() {
+    String str = name;
+    str += ", hierarchy:" + MetastoreUtil.getObjectStr(hierarchy);
+    return str;
+  }
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimensionTable.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimensionTable.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimensionTable.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimensionTable.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,167 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+public final class CubeDimensionTable extends AbstractCubeTable {
+  private final Map<String, TableReference> dimensionReferences;
+  private final Map<String, UpdatePeriod> snapshotDumpPeriods;
+
+  public CubeDimensionTable(String dimName, List<FieldSchema> columns) {
+    this(dimName, columns, new HashMap<String, TableReference>());
+  }
+
+  public CubeDimensionTable(String dimName, List<FieldSchema> columns,
+      Map<String, TableReference> dimensionReferences) {
+    this(dimName, columns, dimensionReferences, null);
+  }
+
+  public CubeDimensionTable(String dimName, List<FieldSchema> columns,
+      Map<String, TableReference> dimensionReferences,
+      Map<String, UpdatePeriod> snapshotDumpPeriods) {
+    this(dimName, columns, dimensionReferences, new HashMap<String, String>(),
+        snapshotDumpPeriods);
+  }
+
+  public CubeDimensionTable(String dimName, List<FieldSchema> columns,
+      Map<String, TableReference> dimensionReferences,
+      Map<String, String> properties,
+      Map<String, UpdatePeriod> snapshotDumpPeriods) {
+    super(dimName, columns, properties);
+    this.dimensionReferences = dimensionReferences;
+    this.snapshotDumpPeriods = snapshotDumpPeriods;
+    addProperties();
+  }
+
+  public CubeDimensionTable(Table tbl) {
+    super(tbl);
+    this.dimensionReferences = getDimensionReferences(getProperties());
+    this.snapshotDumpPeriods = getDumpPeriods(getName(), getProperties());
+  }
+
+  @Override
+  public CubeTableType getTableType() {
+    return CubeTableType.DIMENSION;
+  }
+
+  @Override
+  protected void addProperties() {
+    super.addProperties();
+    addDimensionReferenceProperties(getProperties(), dimensionReferences);
+    addSnapshotPeriods(getName(), getProperties(), snapshotDumpPeriods);
+  }
+
+  public Map<String, TableReference> getDimensionReferences() {
+    return dimensionReferences;
+  }
+
+  public Map<String, UpdatePeriod> getSnapshotDumpPeriods() {
+    return snapshotDumpPeriods;
+  }
+
+  public static void addSnapshotPeriods(String name, Map<String, String> props,
+      Map<String, UpdatePeriod> snapshotDumpPeriods) {
+    if (snapshotDumpPeriods != null) {
+      props.put(MetastoreUtil.getDimensionStorageListKey(name),
+          MetastoreUtil.getStr(snapshotDumpPeriods.keySet()));
+      for (Map.Entry<String, UpdatePeriod> entry :
+          snapshotDumpPeriods.entrySet()) {
+        if (entry.getValue() != null) {
+          props.put(MetastoreUtil.getDimensionDumpPeriodKey(name, entry.getKey()),
+            entry.getValue().name());
+        }
+      }
+    }
+  }
+
+  public static void addDimensionReferenceProperties(Map<String, String> props,
+      Map<String, TableReference> dimensionReferences) {
+    if (dimensionReferences != null) {
+      for (Map.Entry<String, TableReference> entry :
+        dimensionReferences.entrySet()) {
+        props.put(MetastoreUtil.getDimensionSrcReferenceKey(entry.getKey()),
+            MetastoreUtil.getDimensionDestReference(entry.getValue()));
+      }
+    }
+  }
+
+  public static Map<String, TableReference> getDimensionReferences(
+      Map<String, String> params) {
+    Map<String, TableReference> dimensionReferences =
+        new HashMap<String, TableReference>();
+    for (String param : params.keySet()) {
+      if (param.startsWith(MetastoreConstants.DIM_KEY_PFX)) {
+        String key = param.replace(MetastoreConstants.DIM_KEY_PFX, "");
+        String[] toks = key.split("\\.+");
+        String dimName = toks[0];
+        String value = params.get(MetastoreUtil.getDimensionSrcReferenceKey(dimName));
+        if (value != null) {
+          dimensionReferences.put(dimName, new TableReference(value));
+        }
+      }
+    }
+    return dimensionReferences;
+  }
+
+  public static Map<String, UpdatePeriod> getDumpPeriods(String name,
+      Map<String, String> params) {
+    String storagesStr = params.get(MetastoreUtil.getDimensionStorageListKey(
+        name));
+    if (storagesStr != null) {
+      Map<String, UpdatePeriod> dumpPeriods = new HashMap<String, UpdatePeriod>();
+      String[] storages = storagesStr.split(",");
+      for (String storage : storages) {
+        String dumpPeriod = params.get(MetastoreUtil.getDimensionDumpPeriodKey(
+            name, storage));
+        if (dumpPeriod != null) {
+          dumpPeriods.put(storage, UpdatePeriod.valueOf(dumpPeriod));
+        } else {
+          dumpPeriods.put(storage, null);
+        }
+      }
+      return dumpPeriods;
+    }
+    return null;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!super.equals(obj)) {
+      return false;
+    }
+    CubeDimensionTable other = (CubeDimensionTable)obj;
+
+    if (this.getDimensionReferences() == null) {
+      if (other.getDimensionReferences() != null) {
+        return false;
+      }
+    } else {
+      if (!this.getDimensionReferences().equals(
+          other.getDimensionReferences())) {
+        return false;
+      }
+    }
+    if (this.getSnapshotDumpPeriods() == null) {
+      if (other.getSnapshotDumpPeriods() != null) {
+        return false;
+      }
+    } else {
+      if (!this.getSnapshotDumpPeriods().equals(
+          other.getSnapshotDumpPeriods())) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  @Override
+  public Set<String> getStorages() {
+    return snapshotDumpPeriods.keySet();
+  }
+
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,162 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+public final class CubeFactTable extends AbstractCubeTable {
+  private final String cubeName;
+  private final Map<String, List<UpdatePeriod>> storageUpdatePeriods;
+
+  public CubeFactTable(String cubeName, String factName,
+      List<FieldSchema> columns) {
+    this(cubeName, factName, columns, new HashMap<String, List<UpdatePeriod>>());
+  }
+
+  public CubeFactTable(Table hiveTable) {
+    super(hiveTable);
+    this.storageUpdatePeriods = getUpdatePeriods(getName(), getProperties());
+    this.cubeName = getProperties().get(
+        MetastoreUtil.getFactCubeNameKey(getName()));
+  }
+
+  public CubeFactTable(String cubeName, String factName,
+      List<FieldSchema> columns,
+      Map<String, List<UpdatePeriod>> storageUpdatePeriods) {
+    this(cubeName, factName, columns, storageUpdatePeriods,
+        new HashMap<String, String>());
+  }
+
+  public CubeFactTable(String cubeName, String factName,
+      List<FieldSchema> columns,
+      Map<String, List<UpdatePeriod>> storageUpdatePeriods,
+      Map<String, String> properties) {
+    super(factName, columns, properties);
+    this.cubeName = cubeName;
+    this.storageUpdatePeriods = storageUpdatePeriods;
+    addProperties();
+  }
+
+  @Override
+  protected void addProperties() {
+    super.addProperties();
+    getProperties().put(MetastoreUtil.getFactCubeNameKey(getName()), cubeName);
+    addUpdatePeriodProperies(getName(), getProperties(), storageUpdatePeriods);
+  }
+
+  public static void addUpdatePeriodProperies(String name,
+      Map<String, String> props,
+      Map<String, List<UpdatePeriod>> updatePeriods) {
+    if (updatePeriods != null) {
+      props.put(MetastoreUtil.getFactStorageListKey(name),
+          MetastoreUtil.getStr(updatePeriods.keySet()));
+      for (Map.Entry<String, List<UpdatePeriod>> entry :
+        updatePeriods.entrySet()) {
+        props.put(MetastoreUtil.getFactUpdatePeriodKey(name, entry.getKey()),
+            MetastoreUtil.getNamedStr(entry.getValue()));
+      }
+    }
+  }
+
+  public static Map<String, List<UpdatePeriod>> getUpdatePeriods(String name,
+      Map<String, String> props) {
+    Map<String, List<UpdatePeriod>> storageUpdatePeriods = new HashMap<String,
+        List<UpdatePeriod>>();
+    String storagesStr = props.get(MetastoreUtil.getFactStorageListKey(name));
+    String[] storages = storagesStr.split(",");
+    for (String storage : storages) {
+      String updatePeriodStr = props.get(MetastoreUtil.getFactUpdatePeriodKey(
+          name, storage));
+      String[] periods = updatePeriodStr.split(",");
+      List<UpdatePeriod> updatePeriods = new ArrayList<UpdatePeriod>();
+      for (String period : periods) {
+        updatePeriods.add(UpdatePeriod.valueOf(period));
+      }
+      storageUpdatePeriods.put(storage, updatePeriods);
+    }
+    return storageUpdatePeriods;
+  }
+
+  public Map<String, List<UpdatePeriod>> getUpdatePeriods() {
+    return storageUpdatePeriods;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!super.equals(obj)) {
+      return false;
+    }
+
+    CubeFactTable other = (CubeFactTable)obj;
+    if (this.getUpdatePeriods() == null) {
+      if (other.getUpdatePeriods() != null) {
+        return false;
+      }
+    } else {
+      if (!this.getUpdatePeriods().equals(other.getUpdatePeriods())) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  @Override
+  public CubeTableType getTableType() {
+    return CubeTableType.FACT;
+  }
+
+  public List<String> getPartitions(Date fromDate, Date toDate,
+      UpdatePeriod interval) {
+    String fmt = interval.format();
+    if (fmt != null) {
+      SimpleDateFormat dateFormat = new SimpleDateFormat(fmt);
+      Calendar cal = Calendar.getInstance();
+      cal.setTime(fromDate);
+      List<String> summaries = new ArrayList<String>();
+      Date dt = cal.getTime();
+      while (dt.compareTo(toDate) < 0) {
+        summaries.add(dateFormat.format(cal.getTime()));
+        cal.add(interval.calendarField(), 1);
+        dt = cal.getTime();
+      }
+      return summaries;
+    } else {
+      return null;
+    }
+  }
+
+  public static UpdatePeriod maxIntervalInRange(Date from, Date to,
+      Set<UpdatePeriod> updatePeriods) {
+    long diff = to.getTime() - from.getTime();
+    if (diff < UpdatePeriod.MIN_INTERVAL) {
+      return null;
+    }
+    UpdatePeriod max = null;
+    long minratio = diff / UpdatePeriod.MIN_INTERVAL;
+
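+    // Pick the update period with the largest weight that still fits in
+    // the range at least once, i.e. the smallest non-zero diff/weight.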
+    for (UpdatePeriod i : updatePeriods) {
+      long tmpratio = diff / i.weight();
+      if (tmpratio == 0) {
+        // Interval larger than date difference
+        continue;
+      }
+      if (minratio > tmpratio) {
+        minratio = tmpratio;
+        max = i;
+      }
+    }
+    return max;
+  }
+
+  @Override
+  public Set<String> getStorages() {
+    return storageUpdatePeriods.keySet();
+  }
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMeasure.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMeasure.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMeasure.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMeasure.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,145 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+
+public class CubeMeasure implements Named {
+  private final String formatString;
+  private final String aggregate;
+  private final String unit;
+  private final FieldSchema column;
+
+  protected CubeMeasure(FieldSchema column, String formatString,
+      String aggregate, String unit) {
+    this.column = column;
+    assert (column != null);
+    assert (column.getName() != null);
+    assert (column.getType() != null);
+    this.formatString = formatString;
+    this.aggregate = aggregate;
+    this.unit = unit;
+  }
+
+  protected CubeMeasure(String name, Map<String, String> props) {
+    this.column = new FieldSchema(name,
+        props.get(MetastoreUtil.getMeasureTypePropertyKey(name)), "");
+    this.formatString = props.get(MetastoreUtil.getMeasureFormatPropertyKey(name));
+    this.aggregate = props.get(MetastoreUtil.getMeasureAggrPropertyKey(name));
+    this.unit = props.get(MetastoreUtil.getMeasureUnitPropertyKey(name));
+  }
+
+  public String getFormatString() {
+    return formatString;
+  }
+
+  public String getAggregate() {
+    return aggregate;
+  }
+
+  public String getUnit() {
+    return unit;
+  }
+
+  public FieldSchema getColumn() {
+    return column;
+  }
+
+  public String getName() {
+    return column.getName();
+  }
+
+  public String getType() {
+    return column.getType();
+  }
+
+  @Override
+  public String toString() {
+    String str = getName() + ":" + getType();
+    if (unit != null) {
+      str += ",unit:" + unit;
+    }
+    if (aggregate != null) {
+      str += ",aggregate:" + aggregate;
+    }
+    if (formatString != null) {
+      str += ",formatString:" + formatString;
+    }
+    return str;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null) {
+      return false;
+    }
+    if (getClass() != obj.getClass()) {
+      return false;
+    }
+    CubeMeasure other = (CubeMeasure) obj;
+    if (!this.getName().equalsIgnoreCase(other.getName())) {
+      return false;
+    }
+    if (!this.getType().equalsIgnoreCase(other.getType())) {
+      return false;
+    }
+    if (this.getUnit() == null) {
+      if (other.getUnit() != null) {
+        return false;
+      }
+    } else if (!this.getUnit().equalsIgnoreCase(other.getUnit())) {
+      return false;
+    }
+    if (this.getAggregate() == null) {
+      if (other.getAggregate() != null) {
+        return false;
+      }
+    } else if (!this.getAggregate().equalsIgnoreCase(other.getAggregate())) {
+      return false;
+    }
+    if (this.getFormatString() == null) {
+      if (other.getFormatString() != null) {
+        return false;
+      }
+    } else if (!this.getFormatString().equalsIgnoreCase(
+        other.getFormatString())) {
+      return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + ((getName() == null) ? 0 :
+      getName().toLowerCase().hashCode());
+    result = prime * result + ((getType() == null) ? 0 :
+      getType().toLowerCase().hashCode());
+    result = prime * result + ((unit == null) ? 0 :
+      unit.toLowerCase().hashCode());
+    result = prime * result + ((aggregate == null) ? 0 :
+      aggregate.toLowerCase().hashCode());
+    result = prime * result + ((formatString == null) ? 0 :
+      formatString.toLowerCase().hashCode());
+    return result;
+  }
+
+  public void addProperties(Map<String, String> props) {
+    props.put(MetastoreUtil.getMeasureTypePropertyKey(getName()), getType());
+    if (unit != null) {
+      props.put(MetastoreUtil.getMeasureUnitPropertyKey(getName()), unit);
+    }
+    if (getFormatString() != null) {
+      props.put(MetastoreUtil.getMeasureFormatPropertyKey(getName()),
+          formatString);
+    }
+    if (aggregate != null) {
+      props.put(MetastoreUtil.getMeasureAggrPropertyKey(getName()), aggregate);
+    }
+  }
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,510 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+/**
+ * Wrapper class around Hive metastore to do cube metastore operations.
+ *
+ */
+public class CubeMetastoreClient {
+  private final Hive metastore;
+  private final HiveConf config;
+
+  private CubeMetastoreClient(HiveConf conf)
+      throws HiveException {
+    this.metastore = Hive.get(conf);
+    this.config = conf;
+  }
+
+  private static final Map<HiveConf, CubeMetastoreClient> clientMapping =
+      new HashMap<HiveConf, CubeMetastoreClient>();
+
+  public static CubeMetastoreClient getInstance(HiveConf conf)
+      throws HiveException {
+    if (clientMapping.get(conf) == null) {
+      clientMapping.put(conf, new CubeMetastoreClient(conf));
+    }
+    return clientMapping.get(conf);
+  }
+
+  private Hive getClient() {
+    return metastore;
+  }
+
+  public void close() {
+    Hive.closeCurrent();
+  }
+
+  private StorageDescriptor createStorageHiveTable(String tableName,
+      StorageDescriptor sd,
+      Map<String, String> parameters, TableType type,
+      List<FieldSchema> partCols) throws HiveException {
+    try {
+      Table tbl = getClient().newTable(tableName.toLowerCase());
+      tbl.getTTable().getParameters().putAll(parameters);
+      tbl.getTTable().setSd(sd);
+      if (partCols != null && partCols.size() != 0) {
+        tbl.setPartCols(partCols);
+      }
+      tbl.setTableType(type);
+      getClient().createTable(tbl);
+      return tbl.getTTable().getSd();
+    } catch (Exception e) {
+      throw new HiveException("Exception creating table", e);
+    }
+  }
+
+  private StorageDescriptor createCubeHiveTable(AbstractCubeTable table)
+      throws HiveException {
+    try {
+      Table tbl = getClient().newTable(table.getName().toLowerCase());
+      tbl.setTableType(TableType.MANAGED_TABLE);
+      tbl.getTTable().getSd().setCols(table.getColumns());
+      tbl.getTTable().getParameters().putAll(table.getProperties());
+      getClient().createTable(tbl);
+      return tbl.getTTable().getSd();
+    } catch (Exception e) {
+      throw new HiveException("Exception creating table", e);
+    }
+  }
+
+  private void createFactStorage(String factName, Storage storage,
+      StorageDescriptor parentSD, List<UpdatePeriod> updatePeriods)
+          throws HiveException {
+    for (UpdatePeriod updatePeriod : updatePeriods) {
+      createFactStorageUpdatePeriod(factName, storage, parentSD, updatePeriod);
+    }
+  }
+
+  private void createFactStorageUpdatePeriod(String factName, Storage storage,
+      StorageDescriptor parentSD, UpdatePeriod updatePeriod)
+          throws HiveException {
+    String storageTblName = MetastoreUtil.getFactStorageTableName(factName,
+        updatePeriod, storage.getPrefix());
+    createStorage(storageTblName, storage, parentSD);
+  }
+
+  private void createDimStorage(String dimName, Storage storage,
+      StorageDescriptor parentSD)
+          throws HiveException {
+    String storageTblName = MetastoreUtil.getDimStorageTableName(dimName,
+        storage.getPrefix());
+    createStorage(storageTblName, storage, parentSD);
+  }
+
+  private StorageDescriptor getStorageSD(Storage storage,
+      StorageDescriptor parentSD) throws HiveException {
+    StorageDescriptor physicalSd = new StorageDescriptor(parentSD);
+    storage.setSD(physicalSd);
+    return physicalSd;
+  }
+
+  private StorageDescriptor getCubeTableSd(AbstractCubeTable table)
+      throws HiveException {
+    Table cubeTbl = getTable(table.getName());
+    return cubeTbl.getTTable().getSd();
+  }
+
+  private void createStorage(String name,
+      Storage storage, StorageDescriptor parentSD) throws HiveException {
+    StorageDescriptor physicalSd = getStorageSD(storage, parentSD);
+    createStorageHiveTable(name,
+        physicalSd, storage.getTableParameters(),
+        storage.getTableType(), storage.getPartCols());
+  }
+
+  private Map<String, List<UpdatePeriod>> getUpdatePeriods(
+      Map<Storage, List<UpdatePeriod>> storageAggregatePeriods) {
+    if (storageAggregatePeriods != null) {
+      Map<String, List<UpdatePeriod>> updatePeriods =
+          new HashMap<String, List<UpdatePeriod>>();
+      for (Map.Entry<Storage, List<UpdatePeriod>> entry :
+        storageAggregatePeriods.entrySet()) {
+        updatePeriods.put(entry.getKey().getName(), entry.getValue());
+      }
+      return updatePeriods;
+    } else {
+      return null;
+    }
+  }
+
+  public void createCube(Cube cube) throws HiveException {
+    createCubeHiveTable(cube);
+  }
+
+  public void createCube(String name, Set<CubeMeasure> measures,
+      Set<CubeDimension> dimensions) throws HiveException {
+    Cube cube = new Cube(name, measures, dimensions);
+    createCube(cube);
+  }
+
+  public void createCubeFactTable(String cubeName, String factName,
+      List<FieldSchema> columns,
+      Map<Storage, List<UpdatePeriod>> storageAggregatePeriods)
+          throws HiveException {
+    CubeFactTable factTable = new CubeFactTable(cubeName, factName, columns,
+        getUpdatePeriods(storageAggregatePeriods));
+    createCubeTable(factTable, storageAggregatePeriods);
+  }
+
+  public void createCubeDimensionTable(String dimName,
+      List<FieldSchema> columns,
+      Map<String, TableReference> dimensionReferences, Set<Storage> storages)
+          throws HiveException {
+    CubeDimensionTable dimTable = new CubeDimensionTable(dimName, columns,
+        dimensionReferences);
+    createCubeTable(dimTable, storages);
+  }
+
+  private Map<String, UpdatePeriod> getDumpPeriods(
+      Map<Storage, UpdatePeriod> storageDumpPeriods) {
+    if (storageDumpPeriods != null) {
+      Map<String, UpdatePeriod> updatePeriods = new HashMap<String, UpdatePeriod>();
+      for (Map.Entry<Storage, UpdatePeriod> entry : storageDumpPeriods.entrySet()) {
+        updatePeriods.put(entry.getKey().getName(), entry.getValue());
+      }
+      return updatePeriods;
+    } else {
+      return null;
+    }
+  }
+
+  public void createCubeDimensionTable(String dimName,
+      List<FieldSchema> columns,
+      Map<String, TableReference> dimensionReferences,
+      Map<Storage, UpdatePeriod> dumpPeriods)
+          throws HiveException {
+    // add date partitions for storages with dumpPeriods
+    addDatePartitions(dumpPeriods);
+    CubeDimensionTable dimTable = new CubeDimensionTable(dimName, columns,
+        dimensionReferences, getDumpPeriods(dumpPeriods));
+    createCubeTable(dimTable, dumpPeriods.keySet());
+  }
+
+  private void addDatePartitions(Map<Storage, UpdatePeriod> dumpPeriods) {
+    for (Map.Entry<Storage, UpdatePeriod> entry : dumpPeriods.entrySet()) {
+      if (entry.getValue() != null) {
+        entry.getKey().addToPartCols(Storage.getDatePartition());
+      }
+    }
+  }
+
+  public void createCubeTable(CubeFactTable factTable,
+      Map<Storage, List<UpdatePeriod>> storageAggregatePeriods)
+          throws HiveException {
+    // create virtual cube table in metastore
+    StorageDescriptor sd = createCubeHiveTable(factTable);
+
+    if (storageAggregatePeriods != null) {
+      // create tables for each storage
+      for (Storage storage : storageAggregatePeriods.keySet()) {
+        // Add date partition for all facts.
+        storage.addToPartCols(Storage.getDatePartition());
+        createFactStorage(factTable.getName(), storage, sd,
+            storageAggregatePeriods.get(storage));
+      }
+    }
+  }
+
+  public void createCubeTable(CubeDimensionTable dimTable,
+      Set<Storage> storages) throws HiveException {
+    // create virtual cube table in metastore
+    StorageDescriptor sd = createCubeHiveTable(dimTable);
+
+    if (storages != null) {
+      // create tables for each storage
+      for (Storage storage : storages) {
+        createDimStorage(dimTable.getName(), storage, sd);
+      }
+    }
+  }
+
+  public void addStorage(CubeFactTable table, Storage storage,
+      List<UpdatePeriod> updatePeriods) throws HiveException {
+    //TODO add the update periods to cube table properties
+    createFactStorage(table.getName(), storage, getCubeTableSd(table),
+        updatePeriods);
+  }
+
+  public void addStorageUpdatePeriod(CubeFactTable table, Storage storage,
+      UpdatePeriod updatePeriod) throws HiveException {
+    //TODO add the update periods to cube table properties
+    createFactStorageUpdatePeriod(table.getName(), storage,
+        getStorageSD(storage, getCubeTableSd(table)), updatePeriod);
+  }
+
+  public void addColumn(AbstractCubeTable table, FieldSchema column) {
+    //TODO
+  }
+
+  public void addDimensionReference(AbstractCubeTable srcTable, String srcCol,
+      TableReference reference) {
+    //TODO
+  }
+
+  //public void addMeasure(CubeFactTable table, Measure measure) {
+  //TODO
+  //}
+
+  public void addUpdatePeriod(CubeFactTable table, UpdatePeriod updatePeriod) {
+    //TODO
+  }
+
+  public static List<String> getPartitionValues(Table tbl,
+      Map<String, String> partSpec) throws HiveException {
+    List<String> pvals = new ArrayList<String>();
+    for (FieldSchema field : tbl.getPartitionKeys()) {
+      String val = partSpec.get(field.getName());
+      if (val == null) {
+        throw new HiveException("partition spec is invalid. field.getName()" +
+            " does not exist in input.");
+      }
+      pvals.add(val);
+    }
+    return pvals;
+  }
+
+  public void addPartition(CubeFactTable table, Storage storage,
+      UpdatePeriod updatePeriod, Date partitionTimestamp)
+          throws HiveException {
+    String storageTableName = MetastoreUtil.getFactStorageTableName(
+        table.getName(), updatePeriod, storage.getPrefix());
+    addPartition(storageTableName, storage, getPartitionSpec(updatePeriod,
+        partitionTimestamp), false);
+  }
+
+  public void addPartition(CubeFactTable table, Storage storage,
+      UpdatePeriod updatePeriod, Date partitionTimestamp,
+      Map<String, String> partSpec)
+          throws HiveException {
+    String storageTableName = MetastoreUtil.getFactStorageTableName(
+        table.getName(), updatePeriod, storage.getPrefix());
+    partSpec.putAll(getPartitionSpec(updatePeriod,
+        partitionTimestamp));
+    addPartition(storageTableName, storage, partSpec, false);
+  }
+
+  public void addPartition(CubeDimensionTable table, Storage storage,
+      Date partitionTimestamp) throws HiveException {
+    String storageTableName = MetastoreUtil.getDimStorageTableName(
+        table.getName(), storage.getPrefix());
+    addPartition(storageTableName, storage, getPartitionSpec(table.
+        getSnapshotDumpPeriods().get(storage.getName()), partitionTimestamp),
+        true);
+  }
+
+  private Map<String, String> getPartitionSpec(
+      UpdatePeriod updatePeriod, Date partitionTimestamp) {
+    Map<String, String> partSpec = new HashMap<String, String>();
+    SimpleDateFormat dateFormat = new SimpleDateFormat(updatePeriod.format());
+    String pval = dateFormat.format(partitionTimestamp);
+    partSpec.put(Storage.getDatePartitionKey(), pval);
+    return partSpec;
+  }
+
+  private void addPartition(String storageTableName, Storage storage,
+      Map<String, String> partSpec, boolean makeLatest) throws HiveException {
+    storage.addPartition(storageTableName, partSpec, config, makeLatest);
+  }
+
+  boolean tableExists(String cubeName)
+      throws HiveException {
+    try {
+      return (getClient().getTable(cubeName.toLowerCase(), false) != null);
+    } catch (HiveException e) {
+      throw new HiveException("Could not check whether table exists", e);
+    }
+  }
+
+  boolean factPartitionExists(CubeFactTable fact,
+      Storage storage, UpdatePeriod updatePeriod,
+      Date partitionTimestamp) throws HiveException {
+    String storageTableName = MetastoreUtil.getFactStorageTableName(
+        fact.getName(), updatePeriod, storage.getPrefix());
+    return partitionExists(storageTableName, updatePeriod, partitionTimestamp);
+  }
+
+  boolean factPartitionExists(CubeFactTable fact,
+      Storage storage, UpdatePeriod updatePeriod,
+      Date partitionTimestamp, Map<String, String> partSpec) throws HiveException {
+    String storageTableName = MetastoreUtil.getFactStorageTableName(
+        fact.getName(), updatePeriod, storage.getPrefix());
+    return partitionExists(storageTableName, updatePeriod, partitionTimestamp, partSpec);
+  }
+
+  boolean partitionExists(String storageTableName, UpdatePeriod updatePeriod,
+      Date partitionTimestamp)
+          throws HiveException {
+    return partitionExists(storageTableName,
+        getPartitionSpec(updatePeriod, partitionTimestamp));
+  }
+
+  boolean partitionExists(String storageTableName, UpdatePeriod updatePeriod,
+      Date partitionTimestamp, Map<String, String> partSpec)
+          throws HiveException {
+    partSpec.putAll(getPartitionSpec(updatePeriod, partitionTimestamp));
+    return partitionExists(storageTableName, partSpec);
+  }
+
+  private boolean partitionExists(String storageTableName,
+      Map<String, String> partSpec) throws HiveException {
+    try {
+      Table storageTbl = getTable(storageTableName);
+      Partition p = getClient().getPartition(storageTbl, partSpec, false);
+      return (p != null && p.getTPartition() != null);
+    } catch (HiveException e) {
+      throw new HiveException("Could not check whether table exists", e);
+    }
+  }
+
+  boolean dimPartitionExists(CubeDimensionTable dim,
+      Storage storage, Date partitionTimestamp) throws HiveException {
+    String storageTableName = MetastoreUtil.getDimStorageTableName(
+        dim.getName(), storage.getPrefix());
+    return partitionExists(storageTableName,
+        dim.getSnapshotDumpPeriods().get(storage.getName()), partitionTimestamp);
+  }
+
+  boolean latestPartitionExists(CubeDimensionTable dim,
+      Storage storage) throws HiveException {
+    String storageTableName = MetastoreUtil.getDimStorageTableName(
+        dim.getName(), storage.getPrefix());
+    return partitionExists(storageTableName, Storage.getLatestPartSpec());
+  }
+
+  public Table getHiveTable(String tableName) throws HiveException {
+    return getTable(tableName);
+  }
+
+  public Table getStorageTable(String tableName) throws HiveException {
+    return getHiveTable(tableName);
+  }
+
+  private Table getTable(String tableName)  throws HiveException {
+    Table tbl;
+    try {
+      tbl = getClient().getTable(tableName.toLowerCase());
+    } catch (HiveException e) {
+      throw new HiveException("Could not get table", e);
+    }
+    return tbl;
+  }
+
+  public boolean isFactTable(String tableName) throws HiveException {
+    Table tbl = getTable(tableName);
+    String tableType = tbl.getParameters().get(
+        MetastoreConstants.TABLE_TYPE_KEY);
+    return CubeTableType.FACT.name().equals(tableType);
+  }
+
+  public boolean isDimensionTable(String tableName) throws HiveException {
+    Table tbl = getTable(tableName);
+    String tableType = tbl.getParameters().get(
+        MetastoreConstants.TABLE_TYPE_KEY);
+    return CubeTableType.DIMENSION.name().equals(tableType);
+  }
+
+  public boolean isCube(String tableName) throws HiveException {
+    Table tbl = getTable(tableName);
+    String tableType = tbl.getParameters().get(
+        MetastoreConstants.TABLE_TYPE_KEY);
+    return CubeTableType.CUBE.name().equals(tableType);
+  }
+
+  public CubeFactTable getFactTable(String tableName) throws HiveException {
+    Table tbl = getTable(tableName);
+    if (CubeTableType.FACT.name().equals(tbl.getParameters().get(
+        MetastoreConstants.TABLE_TYPE_KEY))) {
+      return new CubeFactTable(tbl);
+    }
+    return null;
+  }
+
+  public CubeDimensionTable getDimensionTable(String tableName)
+      throws HiveException {
+    Table tbl = getTable(tableName);
+    if (CubeTableType.DIMENSION.name().equals(tbl.getParameters().get(
+        MetastoreConstants.TABLE_TYPE_KEY))) {
+      return new CubeDimensionTable(tbl);
+    }
+    return null;
+  }
+
+  public Cube getCube(String tableName) throws HiveException {
+    Table tbl = getTable(tableName);
+    if (CubeTableType.CUBE.name().equals(tbl.getParameters().get(
+        MetastoreConstants.TABLE_TYPE_KEY))) {
+      return new Cube(tbl);
+    }
+    return null;
+  }
+
+  public List<CubeDimensionTable> getAllDimensionTables()
+      throws HiveException {
+    List<CubeDimensionTable> dimTables = new ArrayList<CubeDimensionTable>();
+    try {
+      for (String table : getClient().getAllTables()) {
+        if (isDimensionTable(table)) {
+          dimTables.add(getDimensionTable(table));
+        }
+      }
+    } catch (HiveException e) {
+      throw new HiveException("Could not get all tables", e);
+    }
+    return dimTables;
+  }
+
+  public List<CubeFactTable> getAllFactTables() throws HiveException {
+    List<CubeFactTable> factTables = new ArrayList<CubeFactTable>();
+    try {
+      for (String table : getClient().getAllTables()) {
+        if (isFactTable(table)) {
+          factTables.add(getFactTable(table));
+        }
+      }
+    } catch (HiveException e) {
+      throw new HiveException("Could not get all tables", e);
+    }
+    return factTables;
+  }
+
+  public boolean isColumnInTable(String column, String table) {
+    try {
+      List<String> columns = getColumnNames(table);
+      if (columns == null) {
+        return false;
+      } else {
+        return columns.contains(column);
+      }
+    } catch (HiveException e) {
+      e.printStackTrace();
+      return false;
+    }
+  }
+
+  private List<String> getColumnNames(String table) throws HiveException {
+    List<FieldSchema> fields = getTable(table).getCols();
+    List<String> columns = new ArrayList<String>(fields.size());
+    for (FieldSchema f : fields) {
+      columns.add(f.getName());
+    }
+    return columns;
+  }
+
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeTableType.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeTableType.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeTableType.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeTableType.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,7 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+public enum CubeTableType {
+  CUBE,
+  FACT,
+  DIMENSION
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ExprMeasure.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ExprMeasure.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ExprMeasure.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ExprMeasure.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,69 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+
+public final class ExprMeasure extends CubeMeasure {
+  private final String expr;
+
+  public ExprMeasure(FieldSchema column, String expr, String formatString,
+      String aggregate, String unit) {
+    super(column, formatString, aggregate, unit);
+    this.expr = expr;
+    assert (expr != null);
+  }
+
+  public ExprMeasure(FieldSchema column, String expr) {
+    this(column, expr, null, null, null);
+  }
+
+  public ExprMeasure(String name, Map<String, String> props) {
+    super(name, props);
+    this.expr = props.get(MetastoreUtil.getMeasureExprPropertyKey(getName()));
+  }
+
+  public String getExpr() {
+    return expr;
+  }
+
+  @Override
+  public void addProperties(Map<String, String> props) {
+    super.addProperties(props);
+    props.put(MetastoreUtil.getMeasureClassPropertyKey(getName()),
+        this.getClass().getCanonicalName());
+    props.put(MetastoreUtil.getMeasureExprPropertyKey(getName()), expr);
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = super.hashCode();
+    result = prime * result + ((getExpr() == null) ? 0 :
+      getExpr().toLowerCase().hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!super.equals(obj)) {
+      return false;
+    }
+    ExprMeasure other = (ExprMeasure)obj;
+    if (this.getExpr() == null) {
+      if (other.getExpr() != null) {
+        return false;
+      }
+    } else if (!this.getExpr().equalsIgnoreCase(other.getExpr())) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public String toString() {
+    String str = super.toString();
+    str += "expr:" + expr;
+    return str;
+  }
+}
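
ExprMeasure stores its expression and concrete class name in the property map,
so a measure can be round-tripped through table parameters. A sketch, assuming
the base-class round-trip in CubeMeasure (added earlier in this revision)
restores the remaining fields:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.hive.metastore.api.FieldSchema;
    import org.apache.hadoop.hive.ql.cube.metadata.ExprMeasure;

    public class ExprMeasureRoundTrip {
      public static void main(String[] args) {
        ExprMeasure revenue = new ExprMeasure(
            new FieldSchema("revenue", "double", "gross revenue"),
            "unit_sales * unit_price",  // expression evaluated at query time
            null, "SUM", "USD");
        Map<String, String> props = new HashMap<String, String>();
        revenue.addProperties(props);
        // props now holds cube.measure.revenue.expr and
        // cube.measure.revenue.class, among the base measure keys.
        ExprMeasure restored = new ExprMeasure("revenue", props);
        System.out.println(revenue.equals(restored));  // expected: true
      }
    }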

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HDFSStorage.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HDFSStorage.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HDFSStorage.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HDFSStorage.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,189 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+import static org.apache.hadoop.hive.serde.serdeConstants.COLLECTION_DELIM;
+import static org.apache.hadoop.hive.serde.serdeConstants.ESCAPE_CHAR;
+import static org.apache.hadoop.hive.serde.serdeConstants.FIELD_DELIM;
+import static org.apache.hadoop.hive.serde.serdeConstants.LINE_DELIM;
+import static org.apache.hadoop.hive.serde.serdeConstants.MAPKEY_DELIM;
+import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT;
+
+import java.util.Map;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+public class HDFSStorage extends Storage {
+
+  private Path tableLocation;
+  private Path partLocation;
+
+  private String inputFormat;
+  private String outputFormat;
+  private boolean isCompressed = true;
+
+  // Delimited row format
+  private String fieldDelimiter;
+  private String escapeChar;
+  private String collectionDelimiter;
+  private String lineDelimiter;
+  private String mapKeyDelimiter;
+
+  // serde row format
+  private String serdeClassName;
+
+  public HDFSStorage(String name, String inputFormat, String outputFormat) {
+    this(name, inputFormat, outputFormat, null);
+  }
+
+  public HDFSStorage(String name, String inputFormat, String outputFormat,
+      Path tableLocation) {
+    this(name, inputFormat, outputFormat, null, null, tableLocation, true);
+  }
+
+  public HDFSStorage(String name, String inputFormat, String outputFormat,
+      String fieldDelimiter, String lineDelimiter, Path tableLocation) {
+    this(name, inputFormat, outputFormat, fieldDelimiter, lineDelimiter, null,
+        null, null, true, null, null, tableLocation);
+  }
+
+  public HDFSStorage(String name, String inputFormat, String outputFormat,
+      boolean isCompressed) {
+    this(name, inputFormat, outputFormat, null, isCompressed);
+  }
+
+  public HDFSStorage(String name, String inputFormat, String outputFormat,
+      Path tableLocation, boolean isCompressed) {
+    this(name, inputFormat, outputFormat, null, null, tableLocation, isCompressed);
+  }
+
+  public HDFSStorage(String name, String inputFormat, String outputFormat,
+      String fieldDelimiter, String lineDelimiter, Path tableLocation,
+      boolean isCompressed) {
+    this(name, inputFormat, outputFormat, fieldDelimiter, lineDelimiter, null, null,
+        null, isCompressed, null, null, tableLocation);
+  }
+
+  public HDFSStorage(String name, String inputFormat, String outputFormat,
+      String fieldDelimiter, String lineDelimiter, String escapeChar,
+      String collectionDelimiter, String mapKeyDelimiter, boolean isCompressed,
+      Map<String, String> tableParameters, Map<String, String> serdeParameters,
+      Path tableLocation) {
+    this(name, inputFormat, outputFormat, isCompressed, tableParameters,
+        serdeParameters, tableLocation);
+    this.fieldDelimiter = fieldDelimiter;
+    this.escapeChar = escapeChar;
+    this.lineDelimiter = lineDelimiter;
+    this.collectionDelimiter = collectionDelimiter;
+    this.mapKeyDelimiter = mapKeyDelimiter;
+  }
+
+  public HDFSStorage(String name, String inputFormat, String outputFormat,
+      String serdeClassName, boolean isCompressed,
+      Map<String, String> tableParameters, Map<String, String> serdeParameters,
+      Path tableLocation) {
+    this(name, inputFormat, outputFormat, isCompressed, tableParameters,
+        serdeParameters, tableLocation);
+    this.serdeClassName = serdeClassName;
+  }
+
+  public HDFSStorage(Table table) {
+    super("HDFS", TableType.EXTERNAL_TABLE);
+    //TODO
+  }
+
+  private HDFSStorage(String name, String inputFormat, String outputFormat,
+      boolean isCompressed,
+      Map<String, String> tableParameters, Map<String, String> serdeParameters,
+      Path tableLocation) {
+    super(name, TableType.EXTERNAL_TABLE);
+    this.inputFormat = inputFormat;
+    this.outputFormat = outputFormat;
+    this.isCompressed = isCompressed;
+    if (tableParameters != null) {
+      addToTableParameters(tableParameters);
+    }
+    if (serdeParameters != null) {
+      this.serdeParameters.putAll(serdeParameters);
+    }
+    this.tableLocation = tableLocation;
+  }
+
+  @Override
+  public void setSD(StorageDescriptor sd) {
+    if (fieldDelimiter != null) {
+      serdeParameters.put(FIELD_DELIM, fieldDelimiter);
+      serdeParameters.put(SERIALIZATION_FORMAT, fieldDelimiter);
+    }
+    if (escapeChar != null) {
+      serdeParameters.put(ESCAPE_CHAR, escapeChar);
+    }
+    if (collectionDelimiter != null) {
+      serdeParameters.put(COLLECTION_DELIM, collectionDelimiter);
+    }
+    if (mapKeyDelimiter != null) {
+      serdeParameters.put(MAPKEY_DELIM, mapKeyDelimiter);
+    }
+
+    if (lineDelimiter != null) {
+      serdeParameters.put(LINE_DELIM, lineDelimiter);
+    }
+
+    sd.getSerdeInfo().getParameters().putAll(serdeParameters);
+
+    if (outputFormat != null) {
+      sd.setOutputFormat(outputFormat);
+    }
+    if (inputFormat != null) {
+      sd.setInputFormat(inputFormat);
+    }
+    sd.setCompressed(isCompressed);
+    if (serdeClassName != null) {
+      sd.getSerdeInfo().setSerializationLib(serdeClassName);
+    } else {
+      sd.getSerdeInfo().setSerializationLib(
+          org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
+    }
+    if (tableLocation != null) {
+      sd.setLocation(tableLocation.toString());
+    }
+  }
+
+  public Path getPartLocation() {
+    return partLocation;
+  }
+
+  public void setPartLocation(Path partLocation) {
+    this.partLocation = partLocation;
+  }
+
+  @Override
+  public void addPartition(String storageTableName,
+      Map<String, String> partSpec, HiveConf conf,
+      boolean makeLatest) throws HiveException {
+    Hive client = Hive.get(conf);
+    Table storageTbl = client.getTable(storageTableName);
+    Path location = null;
+    if (partLocation != null) {
+      if (partLocation.isAbsolute()) {
+        location = partLocation;
+      } else {
+        location = new Path(storageTbl.getPath(), partLocation);
+      }
+    }
+    client.createPartition(storageTbl, partSpec,
+        location, getTableParameters(),  inputFormat, outputFormat, -1,
+        storageTbl.getCols(), serdeClassName, serdeParameters, null, null);
+    if (makeLatest) {
+      // symlink this partition to latest
+      client.createPartition(storageTbl, getLatestPartSpec(),
+          location, getTableParameters(),  inputFormat, outputFormat, -1,
+          storageTbl.getCols(), serdeClassName, serdeParameters, null, null);
+    }
+  }
+
+}
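
HDFSStorage collects row-format, format-class and location settings and writes
them into a Hive StorageDescriptor via setSD. A sketch of the delimited-text
case, using the six-argument constructor above; the table location is
illustrative, while the input/output format class names are the stock Hive
text formats:

    import java.util.HashMap;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.metastore.api.SerDeInfo;
    import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
    import org.apache.hadoop.hive.ql.cube.metadata.HDFSStorage;
    import org.apache.hadoop.hive.ql.metadata.HiveException;

    public class HdfsStorageExample {
      public static void main(String[] args) throws HiveException {
        HDFSStorage storage = new HDFSStorage("HDFS",
            "org.apache.hadoop.mapred.TextInputFormat",
            "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat",
            "\t", "\n",                    // field and line delimiters
            new Path("/warehouse/cube"));  // illustrative table location
        StorageDescriptor sd = new StorageDescriptor();
        sd.setSerdeInfo(new SerDeInfo());
        sd.getSerdeInfo().setParameters(new HashMap<String, String>());
        storage.setSD(sd);  // fills serde params, formats, compression, location
        System.out.println(sd.getLocation());
      }
    }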

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/InlineDimension.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/InlineDimension.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/InlineDimension.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/InlineDimension.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,69 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+
+public class InlineDimension extends BaseDimension {
+
+  private final List<String> values;
+
+  public InlineDimension(FieldSchema column, List<String> values) {
+    super(column);
+    this.values = values;
+  }
+
+  public List<String> getValues() {
+    return values;
+  }
+
+  @Override
+  public void addProperties(Map<String, String> props) {
+    super.addProperties(props);
+    props.put(MetastoreUtil.getInlineDimensionSizeKey(getName()),
+        String.valueOf(values.size()));
+    props.put(MetastoreUtil.getInlineDimensionValuesKey(getName()),
+        MetastoreUtil.getStr(values));
+  }
+
+  public InlineDimension(String name, Map<String, String> props) {
+    super(name, props);
+    String valueStr = props.get(MetastoreUtil.getInlineDimensionValuesKey(name));
+    this.values = Arrays.asList(valueStr.split(","));
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = super.hashCode();
+    result = prime * result + ((getValues() == null) ? 0 :
+      getValues().hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!super.equals(obj)) {
+      return false;
+    }
+    InlineDimension other = (InlineDimension)obj;
+    if (this.getValues() == null) {
+      if (other.getValues() != null) {
+        return false;
+      }
+    } else if (!this.getValues().equals(other.getValues())) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public String toString() {
+    String str = super.toString();
+    str += "values:" + MetastoreUtil.getStr(values);
+    return str;
+  }
+
+}
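
InlineDimension persists its allowed values as a comma-joined list under the
...inline.values key and restores them by splitting on commas, so the values
themselves must not contain commas. A round-trip sketch:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.hive.metastore.api.FieldSchema;
    import org.apache.hadoop.hive.ql.cube.metadata.InlineDimension;

    public class InlineDimensionRoundTrip {
      public static void main(String[] args) {
        InlineDimension region = new InlineDimension(
            new FieldSchema("region", "string", "sales region"),
            Arrays.asList("APAC", "EMEA", "AMER"));
        Map<String, String> props = new HashMap<String, String>();
        region.addProperties(props);
        // cube.dimension.region.inline.values -> "APAC,EMEA,AMER"
        InlineDimension restored = new InlineDimension("region", props);
        System.out.println(restored.getValues());  // [APAC, EMEA, AMER]
      }
    }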

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/MetastoreConstants.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/MetastoreConstants.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/MetastoreConstants.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/MetastoreConstants.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,36 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+public interface MetastoreConstants {
+  public static final String TABLE_TYPE_KEY = "cube.table.type";
+
+  // Cube constants
+  public static final String CUBE_KEY_PFX = "cube.";
+  public static final String MEASURES_LIST_SFX = ".measures.list";
+  public static final String DIMENSIONS_LIST_SFX = ".dimensions.list";
+
+  // fact constants
+  public static final String FACT_KEY_PFX = "cube.fact.";
+  public static final String UPDATE_PERIOD_SFX = ".updateperiods";
+  public static final String CUBE_NAME_SFX = ".cubename";
+
+  // column constants
+  public static final String TYPE_SFX = ".type";
+
+  // measure constants
+  public static final String MEASURE_KEY_PFX = "cube.measure.";
+  public static final String UNIT_SFX = ".unit";
+  public static final String AGGR_SFX = ".aggregate";
+  public static final String EXPR_SFX = ".expr";
+  public static final String FORMATSTRING_SFX = ".format";
+
+  // dimension constants
+  public static final String DIM_KEY_PFX = "cube.dimension.";
+  public static final String DIM_REFERS_SFX = ".refers";
+  public static final String TABLE_COLUMN_SEPERATOR = ".";
+  public static final String INLINE_SIZE_SFX = ".inline.size";
+  public static final String INLINE_VALUES_SFX = ".inline.values";
+  public static final String HIERARCHY_SFX = ".hierarchy.";
+  public static final String CLASS_SFX = ".class";
+  public static final String DUMP_PERIOD_SFX = ".dumpperiod";
+  public static final String STORAGE_LIST_SFX = ".storages";
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/MetastoreUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/MetastoreUtil.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/MetastoreUtil.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/MetastoreUtil.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,200 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+import java.util.Collection;
+import java.util.Iterator;
+
+public class MetastoreUtil implements MetastoreConstants {
+
+  public static final String getVirtualFactTableName(String factName,
+      UpdatePeriod updatePeriod) {
+    return factName + "_" + updatePeriod.name();
+  }
+
+  public static final String getFactStorageTableName(String factName,
+      UpdatePeriod updatePeriod, String storagePrefix) {
+    return getStorageTableName(getVirtualFactTableName(factName,
+        updatePeriod), storagePrefix);
+  }
+
+  public static final String getDimStorageTableName(String dimName,
+      String storagePrefix) {
+    return getStorageTableName(dimName, storagePrefix);
+  }
+
+  public static final String getStorageTableName(String virtualTableName,
+      String storagePrefix) {
+    return storagePrefix + virtualTableName;
+  }
+
+  public static String getCubeNameFromVirtualName(String virtualName,
+      CubeTableType type) {
+    return virtualName.substring(type.name().length() + 1);
+  }
+
+  /////////////////////////
+  // Dimension properties//
+  /////////////////////////
+  public static final String getDimTypePropertyKey(String dimName) {
+    return getDimensionKeyPrefix(dimName) + TYPE_SFX;
+  }
+
+  public static String getHierachyElementKeyPFX(String dimName) {
+    return getDimensionKeyPrefix(dimName) + HIERARCHY_SFX;
+  }
+
+  public static String getHierachyElementKeyName(String dimName, int index) {
+    return getHierachyElementKeyPFX(dimName) + index;
+  }
+
+  public static Integer getHierachyElementIndex(String dimName, String param) {
+    return Integer.parseInt(param.substring(getHierachyElementKeyPFX(
+        dimName).length()));
+  }
+
+  public static final String getDimensionSrcReferenceKey(String dimName) {
+    return getDimensionKeyPrefix(dimName) + DIM_REFERS_SFX;
+  }
+
+  public static final String getDimensionDestReference(String tableName,
+      String columnName) {
+    return tableName.toLowerCase() + TABLE_COLUMN_SEPERATOR
+        + columnName.toLowerCase();
+  }
+
+  public static final String getDimensionDestReference(
+      TableReference reference) {
+    return reference.getDestTable() + TABLE_COLUMN_SEPERATOR
+        + reference.getDestColumn();
+  }
+
+  public static String getInlineDimensionSizeKey(String name) {
+    return getDimensionKeyPrefix(name) + INLINE_SIZE_SFX;
+  }
+
+  public static String getInlineDimensionValuesKey(String name) {
+    return getDimensionKeyPrefix(name) + INLINE_VALUES_SFX;
+  }
+
+  public static String getDimensionKeyPrefix(String dimName) {
+    return DIM_KEY_PFX + dimName.toLowerCase();
+  }
+
+  public static String getDimensionDumpPeriodKey(String name, String storage) {
+    return getDimensionKeyPrefix(name) + "." + storage.toLowerCase() +
+        DUMP_PERIOD_SFX;
+  }
+
+  public static String getDimensionStorageListKey(String name) {
+    return getDimensionKeyPrefix(name) + STORAGE_LIST_SFX;
+  }
+
+  ////////////////////////////
+  // Measure properties  ///
+  ///////////////////////////
+  public static final String getMeasurePrefix(String measureName) {
+    return MEASURE_KEY_PFX + measureName.toLowerCase();
+  }
+
+  public static final String getMeasureClassPropertyKey(String measureName) {
+    return getMeasurePrefix(measureName) + CLASS_SFX;
+  }
+
+  public static final String getMeasureUnitPropertyKey(String measureName) {
+    return getMeasurePrefix(measureName) + UNIT_SFX;
+  }
+
+  public static final String getMeasureTypePropertyKey(String measureName) {
+    return getMeasurePrefix(measureName) + TYPE_SFX;
+  }
+
+  public static final String getMeasureFormatPropertyKey(String measureName) {
+    return getMeasurePrefix(measureName) + FORMATSTRING_SFX;
+  }
+
+  public static final String getMeasureAggrPropertyKey(String measureName) {
+    return getMeasurePrefix(measureName) + AGGR_SFX;
+  }
+
+  public static final String getMeasureExprPropertyKey(String measureName) {
+    return getMeasurePrefix(measureName) + EXPR_SFX;
+  }
+
+  ////////////////////////////
+  // Cube properties  ///
+  ///////////////////////////
+  public static final String getCubePrefix(String cubeName) {
+    return CUBE_KEY_PFX + cubeName.toLowerCase();
+  }
+
+  public static final String getCubeMeasureListKey(String cubeName) {
+    return getCubePrefix(cubeName) + MEASURES_LIST_SFX;
+  }
+
+  public static final String getCubeDimensionListKey(String cubeName) {
+    return getCubePrefix(cubeName) + DIMENSIONS_LIST_SFX;
+  }
+
+  ////////////////////////////
+  // Fact properties  ///
+  ///////////////////////////
+  public static String getFactStorageListKey(String name) {
+    return getFactKeyPrefix(name) + STORAGE_LIST_SFX;
+  }
+
+  public static String getFactKeyPrefix(String factName) {
+    return FACT_KEY_PFX + factName.toLowerCase();
+  }
+
+  public static String getFactUpdatePeriodKey(String name, String storage) {
+    return getFactKeyPrefix(name) + "." + storage.toLowerCase()
+        + UPDATE_PERIOD_SFX;
+  }
+
+  public static String getFactCubeNameKey(String name) {
+    return getFactKeyPrefix(name) + CUBE_NAME_SFX;
+  }
+
+  ////////////////////////////
+  // Utils  ///
+  ///////////////////////////
+  public static <E extends Named> String getNamedStr(Collection<E> set) {
+    if (set == null || set.isEmpty()) {
+      return "";
+    }
+    StringBuilder valueStr = new StringBuilder();
+    Iterator<E> it = set.iterator();
+    for (int i = 0; i < (set.size()-1) ; i++) {
+      valueStr.append(it.next().getName());
+      valueStr.append(",");
+    }
+    valueStr.append(it.next().getName());
+    return valueStr.toString();
+  }
+
+  public static String getObjectStr(Collection<?> set) {
+    if (set == null || set.isEmpty()) {
+      return "";
+    }
+    StringBuilder valueStr = new StringBuilder();
+    Iterator<?> it = set.iterator();
+    for (int i = 0; i < (set.size()-1) ; i++) {
+      valueStr.append(it.next().toString());
+      valueStr.append(",");
+    }
+    valueStr.append(it.next().toString());
+    return valueStr.toString();
+  }
+
+  public static String getStr(Collection<String> set) {
+    if (set == null || set.isEmpty()) {
+      return "";
+    }
+    StringBuilder valueStr = new StringBuilder();
+    Iterator<String> it = set.iterator();
+    for (int i = 0; i < (set.size()-1) ; i++) {
+      valueStr.append(it.next());
+      valueStr.append(",");
+    }
+    valueStr.append(it.next());
+    return valueStr.toString();
+  }
+}
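
The helpers above define the naming scheme that ties the pieces together:
property keys are lower-cased and prefixed per table kind, and a physical
storage table name is the storage prefix prepended to a virtual fact name.
A small sketch, with expected values derived from the code above:

    import org.apache.hadoop.hive.ql.cube.metadata.MetastoreUtil;
    import org.apache.hadoop.hive.ql.cube.metadata.Storage;
    import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;

    public class NamingSchemeExample {
      public static void main(String[] args) {
        // Virtual fact table for the daily rollup of a fact named "sales":
        System.out.println(MetastoreUtil.getVirtualFactTableName(
            "sales", UpdatePeriod.DAILY));            // sales_DAILY
        // Physical table backing that rollup on a storage named "HDFS":
        System.out.println(MetastoreUtil.getFactStorageTableName(
            "sales", UpdatePeriod.DAILY,
            Storage.getPrefix("HDFS")));              // HDFS_sales_DAILY
        // Property key listing the measures of a cube named "Sales":
        System.out.println(MetastoreUtil.getCubeMeasureListKey(
            "Sales"));                                // cube.sales.measures.list
      }
    }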

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Named.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Named.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Named.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Named.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,5 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+public interface Named {
+  public String getName();
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ReferencedDimension.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ReferencedDimension.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ReferencedDimension.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ReferencedDimension.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,63 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+
+public class ReferencedDimension extends BaseDimension {
+  private final TableReference reference;
+
+  public ReferencedDimension(FieldSchema column, TableReference reference) {
+    super(column);
+    this.reference = reference;
+  }
+
+  public TableReference getReference() {
+    return reference;
+  }
+
+  @Override
+  public void addProperties(Map<String, String> props) {
+    super.addProperties(props);
+    props.put(MetastoreUtil.getDimensionSrcReferenceKey(getName()),
+        MetastoreUtil.getDimensionDestReference(reference));
+  }
+
+  public ReferencedDimension(String name, Map<String, String> props) {
+    super(name, props);
+    this.reference = new TableReference(
+        props.get(MetastoreUtil.getDimensionSrcReferenceKey(getName())));
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = super.hashCode();
+    result = prime * result + ((getReference() == null) ? 0 :
+      getReference().hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!super.equals(obj)) {
+      return false;
+    }
+    ReferencedDimension other = (ReferencedDimension)obj;
+    if (this.getReference() == null) {
+      if (other.getReference() != null) {
+        return false;
+      }
+    } else if (!this.getReference().equals(other.getReference())) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public String toString() {
+    String str = super.toString();
+    str += "reference:" + getReference();
+    return str;
+  }
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,90 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde.serdeConstants;
+
+public abstract class Storage implements Named {
+
+  private final TableType tableType;
+  private final Map<String, String> tableParameters = new HashMap<String, String>();
+  private final List<FieldSchema> partCols = new ArrayList<FieldSchema>();
+  protected Map<String, String> serdeParameters = new HashMap<String, String>();
+  private final String name;
+
+  protected Storage(String name, TableType type) {
+    this.tableType = type;
+    this.name = name;
+  }
+
+  public List<FieldSchema> getPartCols() {
+    return partCols;
+  }
+
+  public TableType getTableType() {
+    return tableType;
+  }
+
+  public Map<String, String> getTableParameters() {
+    return tableParameters;
+  }
+
+  public void addToPartCols(FieldSchema column) {
+    partCols.add(column);
+  }
+
+  protected void addToTableParameters(Map<String, String> parameters) {
+    tableParameters.putAll(parameters);
+  }
+
+  protected void addTableProperty(String key, String value) {
+    tableParameters.put(key, value);
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public String getPrefix() {
+    return getPrefix(getName());
+  }
+
+  public static String getPrefix(String name) {
+    return name + StorageConstants.STORGAE_SEPERATOR;
+  }
+
+  public abstract void setSD(StorageDescriptor physicalSd) throws HiveException;
+
+  public abstract void addPartition(String storageTableName,
+      Map<String, String> partSpec, HiveConf conf, boolean makeLatest)
+          throws HiveException;
+
+
+  public static String getDatePartitionKey() {
+    return StorageConstants.DATE_PARTITION_KEY;
+  }
+
+  private static Map<String, String> latestSpec = new HashMap<String, String>();
+  static {
+    latestSpec.put(getDatePartitionKey(), StorageConstants.LATEST_PARTITION_VALUE);
+  }
+
+  public static Map<String, String> getLatestPartSpec() {
+    return latestSpec;
+  }
+
+  private static FieldSchema dtPart = new FieldSchema(getDatePartitionKey(),
+      serdeConstants.STRING_TYPE_NAME,
+      "date partition");
+  public static FieldSchema getDatePartition() {
+    return dtPart;
+  }
+}
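
Storage also centralizes the time-partitioning convention: every storage table
is partitioned on a string column named dt, and a reserved partition value
"latest" marks the most recent data. A sketch of the static helpers:

    import org.apache.hadoop.hive.metastore.api.FieldSchema;
    import org.apache.hadoop.hive.ql.cube.metadata.Storage;

    public class PartitionConventionExample {
      public static void main(String[] args) {
        FieldSchema dt = Storage.getDatePartition();
        System.out.println(dt.getName() + " : " + dt.getType());  // dt : string
        // Spec used to (re)register the "latest" symlink partition:
        System.out.println(Storage.getLatestPartSpec());          // {dt=latest}
      }
    }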

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/StorageConstants.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/StorageConstants.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/StorageConstants.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/StorageConstants.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,9 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+public interface StorageConstants {
+  public static final String ES_STORAGE_NAME = "ES";
+  public static final String HDFS_STORAGE_NAME = "HDFS";
+  public static final String DATE_PARTITION_KEY = "dt";
+  public static final String STORGAE_SEPERATOR = "_";
+  public static final String LATEST_PARTITION_VALUE = "latest";
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/TableReference.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/TableReference.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/TableReference.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/TableReference.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,77 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+public class TableReference {
+  private String destTable;
+  private String destColumn;
+
+  public TableReference() {
+  }
+
+  public TableReference(String destTable, String destColumn) {
+    this.destTable = destTable;
+    this.destColumn = destColumn;
+  }
+
+  public TableReference(String reference) {
+    String[] desttoks = reference.split("\\.+");
+    this.destTable = desttoks[0];
+    this.destColumn = desttoks[1];
+  }
+
+  public String getDestTable() {
+    return destTable;
+  }
+
+  public void setDestTable(String dest) {
+    this.destTable = dest;
+  }
+
+  public String getDestColumn() {
+    return destColumn;
+  }
+
+  public void setDestColumn(String destColumn) {
+    this.destColumn = destColumn;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null) {
+      return false;
+    }
+    if (getClass() != obj.getClass()) {
+      return false;
+    }
+    TableReference other = (TableReference) obj;
+    if (this.getDestColumn() == null) {
+      if (other.getDestColumn() != null) {
+        return false;
+      }
+    } else if (!this.getDestColumn().equals(other.getDestColumn())) {
+      return false;
+    }
+    if (this.getDestTable() == null) {
+      if (other.getDestTable() != null) {
+        return false;
+      }
+    } else if (!this.getDestTable().equals(other.getDestTable())) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public String toString() {
+    return destTable + "." + destColumn;
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + ((destColumn == null) ? 0 : destColumn.hashCode());
+    result = prime * result + ((destTable == null) ? 0 : destTable.hashCode());
+    return result;
+  }
+}
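
TableReference is the serialized form of a dimension's foreign-key target; the
one-argument constructor parses the "table.column" string written by
MetastoreUtil.getDimensionDestReference. For example:

    import org.apache.hadoop.hive.ql.cube.metadata.TableReference;

    public class TableReferenceExample {
      public static void main(String[] args) {
        TableReference ref = new TableReference("countrytable.id");
        System.out.println(ref.getDestTable());   // countrytable
        System.out.println(ref.getDestColumn());  // id
        System.out.println(ref);                  // countrytable.id
      }
    }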

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java?rev=1456360&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java Thu Mar 14 08:09:31 2013
@@ -0,0 +1,43 @@
+package org.apache.hadoop.hive.ql.cube.metadata;
+
+import java.util.Calendar;
+
+
+public enum UpdatePeriod implements Named {
+  SECONDLY (Calendar.SECOND, 1000, "yyyy-MM-dd-HH-mm-ss"),
+  MINUTELY (Calendar.MINUTE, 60 * SECONDLY.weight(), "yyyy-MM-dd-HH-mm"),
+  HOURLY (Calendar.HOUR_OF_DAY, 60 * MINUTELY.weight(), "yyyy-MM-dd-HH"),
+  DAILY (Calendar.DATE, 24 * HOURLY.weight(), "yyyy-MM-dd"),
+  WEEKLY (Calendar.WEEK_OF_YEAR, 7 * DAILY.weight(), "yyyy-'W'ww-u"),
+  MONTHLY (Calendar.MONTH, 30 * DAILY.weight(), "yyyy-MM"),
+  //QUARTERLY (Calendar.MONTH, 3 * MONTHLY.weight(), "YYYY-MM"),
+  YEARLY (Calendar.YEAR, 12 * MONTHLY.weight(), "yyyy");
+
+  public static final long MIN_INTERVAL = SECONDLY.weight();
+  private final int calendarField;
+  private final long weight;
+  private final String format;
+
+  UpdatePeriod(int calendarField, long diff, String format) {
+    this.calendarField = calendarField;
+    this.weight = diff;
+    this.format = format;
+  }
+
+  public int calendarField() {
+    return this.calendarField;
+  }
+
+  public long weight() {
+    return this.weight;
+  }
+
+  public String format() {
+    return this.format;
+  }
+
+  @Override
+  public String getName() {
+    return name();
+  }
+}
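
UpdatePeriod pairs each rollup granularity with a Calendar field, an
approximate weight in milliseconds (a month is taken as 30 days, a year as 12
such months) and a SimpleDateFormat pattern for partition values. A sketch of
formatting a daily partition value:

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;

    public class PartitionValueExample {
      public static void main(String[] args) {
        SimpleDateFormat fmt =
            new SimpleDateFormat(UpdatePeriod.DAILY.format());
        System.out.println(fmt.format(new Date()));      // e.g. 2013-03-14
        System.out.println(UpdatePeriod.DAILY.weight()); // 86400000 ms
      }
    }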


