hawq-commits mailing list archives

From r..@apache.org
Subject [40/51] [partial] incubator-hawq git commit: SGA import
Date Sat, 19 Sep 2015 00:36:23 GMT
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQInterval.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQInterval.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQInterval.java
new file mode 100644
index 0000000..cff776e
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQInterval.java
@@ -0,0 +1,253 @@
+package com.pivotal.hawq.mapreduce.datatype;
+
+import com.pivotal.hawq.mapreduce.HAWQException;
+
+/**
+ * Store the value of an interval in the database
+ */
+public class HAWQInterval
+{
+	private int year;
+	private int month;
+	private int day;
+	private int hour;
+	private int minute;
+	private int second;
+	private int millisecond;
+	private int microsecond;
+
+	/**
+	 * Initialize an interval
+	 * 
+	 * @param year
+	 *            years in this interval
+	 * @param month
+	 *            month in this interval, 0 to 11
+	 * @param day
+	 *            day in this interval
+	 * @param hour
+	 *            hour in this interval, 0 to 23
+	 * @param minute
+	 *            minute in this interval, 0 to 59
+	 * @param second
+	 *            second in this interval, 0 to 59
+	 * @throws HAWQException
+	 *             when parameter is invalid
+	 */
+	public HAWQInterval(int year, int month, int day, int hour, int minute,
+			int second) throws HAWQException
+	{
+		this(year, month, day, hour, minute, second, 0, 0);
+	}
+
+	/**
+	 * Initialize an interval
+	 * 
+	 * @param year
+	 *            years in this interval
+	 * @param month
+	 *            month in this interval, 0 to 11
+	 * @param day
+	 *            day in this interval
+	 * @param hour
+	 *            hour in this interval, 0 to 23
+	 * @param minute
+	 *            minute in this interval, 0 to 59
+	 * @param second
+	 *            second in this interval, 0 to 59
+	 * @param millisecond
+	 *            millisecond in this interval, 0 to 999
+	 * @param microsecond
+	 *            microsecond in this interval, 0 to 999
+	 * @throws HAWQException
+	 *             when parameter is invalid
+	 */
+	public HAWQInterval(int year, int month, int day, int hour, int minute,
+			int second, int millisecond, int microsecond) throws HAWQException
+	{
+		if (month < 0 || month > 11)
+			throw new HAWQException(
+					"Months in interval should be between 0 and 11");
+		if (hour < 0 || hour > 23)
+			throw new HAWQException("Hours in interval should be between 0 and 23");
+		if (minute < 0 || minute > 59)
+			throw new HAWQException(
+					"Minutes in interval should be between 0 and 59");
+		if (second < 0 || second > 59)
+			throw new HAWQException(
+					"Seconds in interval should be between 0 and 59");
+		if (millisecond < 0 || millisecond > 999)
+			throw new HAWQException(
+					"Milliseconds in interval should be between 0 and 999");
+		if (microsecond < 0 || microsecond > 999)
+			throw new HAWQException(
+					"Microseconds in interval should be between 0 and 999");
+
+		this.year = year;
+		this.month = month;
+		this.day = day;
+		this.hour = hour;
+		this.minute = minute;
+		this.second = second;
+		this.millisecond = millisecond;
+		this.microsecond = microsecond;
+	}
+
+	/**
+	 * Get how many years in this interval
+	 * 
+	 * @return amount of years
+	 */
+	public int getYears()
+	{
+		return year;
+	}
+
+	/**
+	 * Get how many months in this interval (0-11)
+	 * 
+	 * @return amount of months
+	 */
+	public int getMonths()
+	{
+		return month;
+	}
+
+	/**
+	 * Get how many days in this interval
+	 * 
+	 * @return amount of days
+	 */
+	public int getDays()
+	{
+		return day;
+	}
+
+	/**
+	 * Get how many hours in this interval (0-23)
+	 * 
+	 * @return amount of hours
+	 */
+	public int getHours()
+	{
+		return hour;
+	}
+
+	/**
+	 * Get how many minutes in this interval (0-59)
+	 * 
+	 * @return amount of minutes
+	 */
+	public int getMinutes()
+	{
+		return minute;
+	}
+
+	/**
+	 * Get how many seconds in this interval (0-59)
+	 * 
+	 * @return amount of seconds
+	 */
+	public int getSeconds()
+	{
+		return second;
+	}
+
+	/**
+	 * Get how many milliseconds in this interval (0-999)
+	 * 
+	 * @return amount of milliseconds
+	 */
+	public int getMilliseconds()
+	{
+		return millisecond;
+	}
+
+	/**
+	 * Get how many microseconds in this interval (0-999)
+	 * 
+	 * @return amount of microseconds
+	 */
+	public int getMicroseconds()
+	{
+		return microsecond;
+	}
+
+	@Override
+	public boolean equals(Object obj)
+	{
+		if (obj instanceof HAWQInterval)
+		{
+			HAWQInterval other = (HAWQInterval) obj;
+			return year == other.getYears() && month == other.getMonths()
+					&& day == other.getDays() && hour == other.getHours()
+					&& minute == other.getMinutes()
+					&& second == other.getSeconds()
+					&& millisecond == other.getMilliseconds()
+					&& microsecond == other.getMicroseconds();
+		}
+		return false;
+	}
+
+	@Override
+	public String toString()
+	{
+		StringBuffer buffer = new StringBuffer();
+		if (year != 0)
+		{
+			buffer.append(year);
+			buffer.append(" year");
+			if (year != 1)
+				buffer.append('s');
+			buffer.append(' ');
+		}
+		if (month != 0)
+		{
+			buffer.append(month);
+			buffer.append(" mon");
+			if (month != 1)
+				buffer.append('s');
+			buffer.append(' ');
+		}
+		if (day != 0)
+		{
+			buffer.append(day);
+			buffer.append(" day");
+			if (day != 1)
+				buffer.append('s');
+			buffer.append(' ');
+		}
+		if (hour != 0 || minute != 0 || second != 0 || microsecond != 0
+				|| millisecond != 0)
+		{
+			buffer.append(String.format("%02d", hour)).append(':');
+			buffer.append(String.format("%02d", minute)).append(':');
+			buffer.append(String.format("%02d", second));
+
+			if (millisecond != 0 || microsecond != 0)
+			{
+				buffer.append('.').append(String.format("%03d", millisecond));
+				if (microsecond != 0)
+					buffer.append(String.format("%03d", microsecond));
+
+				while (true)
+				{
+					int temp = buffer.length();
+					if (buffer.charAt(temp - 1) != '0'
+							&& buffer.charAt(temp - 1) != ' ')
+						break;
+					buffer.deleteCharAt(temp - 1);
+				}
+			}
+		}
+
+		while (true)
+		{
+			int temp = buffer.length();
+			if (buffer.charAt(temp - 1) != ' ')
+				break;
+			buffer.deleteCharAt(temp - 1);
+		}
+		return buffer.toString();
+	}
+}
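
For illustration (not part of the diff above), a minimal sketch of how HAWQInterval could be used, assuming the class and HAWQException are on the classpath:

    import com.pivotal.hawq.mapreduce.HAWQException;
    import com.pivotal.hawq.mapreduce.datatype.HAWQInterval;

    public class HAWQIntervalDemo
    {
        public static void main(String[] args) throws HAWQException
        {
            // 1 year, 2 months, 3 days, 04:05:06 plus 7 milliseconds and 8 microseconds
            HAWQInterval interval = new HAWQInterval(1, 2, 3, 4, 5, 6, 7, 8);

            // toString() renders the PostgreSQL-style text form,
            // e.g. "1 year 2 mons 3 days 04:05:06.007008"
            System.out.println(interval);

            // an out-of-range field (e.g. month = 12) makes the constructor throw HAWQException
        }
    }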

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQLseg.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQLseg.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQLseg.java
new file mode 100644
index 0000000..b21004d
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQLseg.java
@@ -0,0 +1,128 @@
+package com.pivotal.hawq.mapreduce.datatype;
+
+import com.pivotal.hawq.mapreduce.HAWQException;
+
+/**
+ * Store the value of an lseg (line segment) in the database
+ */
+public class HAWQLseg
+{
+	private HAWQPoint point1;
+	private HAWQPoint point2;
+
+	/**
+	 * Initialize a line segment from a string
+	 * 
+	 * @param value
+	 *            the string to initialize the line segment from, in the form
+	 *            [(1.2,1.3),(2.2,2.3)]
+	 * @throws HAWQException
+	 *             when the string is not a valid line segment
+	 */
+	public HAWQLseg(String value) throws HAWQException
+	{
+		if (value.startsWith("[") && value.endsWith("]"))
+		{
+			String[] pointStrs = value.substring(1, value.length() - 1).split(
+					",");
+
+			if (pointStrs.length != 4)
+				throw new HAWQException("Cannot convert " + value
+						+ " to HAWQLseg");
+
+			String pointStr1 = pointStrs[0] + "," + pointStrs[1];
+			String pointStr2 = pointStrs[2] + "," + pointStrs[3];
+
+			try
+			{
+				init(new HAWQPoint(pointStr1), new HAWQPoint(pointStr2));
+			}
+			catch (HAWQException e)
+			{
+				throw new HAWQException("Cannot convert " + value
+						+ " to HAWQLseg");
+			}
+		}
+		else
+		{
+			throw new HAWQException("Cannot convert " + value + " to HAWQLseg");
+		}
+	}
+
+	/**
+	 * Initialize a line segment by coordinates
+	 * 
+	 * @param x1
+	 *            abscissa of first endpoint
+	 * @param y1
+	 *            ordinate of first endpoint
+	 * @param x2
+	 *            abscissa of second endpoint
+	 * @param y2
+	 *            ordinate of second endpoint
+	 */
+	public HAWQLseg(double x1, double y1, double x2, double y2)
+	{
+		init(new HAWQPoint(x1, y1), new HAWQPoint(x2, y2));
+	}
+
+	/**
+	 * Initialize a line segment by endpoints
+	 * 
+	 * @param point1
+	 *            first endpoint
+	 * @param point2
+	 *            second endpoint
+	 */
+	public HAWQLseg(HAWQPoint point1, HAWQPoint point2)
+	{
+		init(point1, point2);
+	}
+
+	private void init(HAWQPoint point1, HAWQPoint point2)
+	{
+		this.point1 = point1;
+		this.point2 = point2;
+	}
+
+	/**
+	 * Get first endpoint
+	 * 
+	 * @return first endpoint
+	 */
+	public HAWQPoint getPoint1()
+	{
+		return point1;
+	}
+
+	/**
+	 * Get second endpoint
+	 * 
+	 * @return second endpoint
+	 */
+	public HAWQPoint getPoint2()
+	{
+		return point2;
+	}
+
+	@Override
+	public boolean equals(Object obj)
+	{
+		if (obj instanceof HAWQLseg)
+		{
+			HAWQLseg other = (HAWQLseg) obj;
+			return point1.equals(other.getPoint1())
+					&& point2.equals(other.getPoint2());
+		}
+		return false;
+	}
+
+	@Override
+	public String toString()
+	{
+		StringBuffer buffer = new StringBuffer();
+		buffer.append('[').append(point1.toString()).append(',')
+				.append(point2.toString()).append(']');
+		return buffer.toString();
+	}
+}
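
For illustration (not part of the diff above), a short sketch of the two ways HAWQLseg can be constructed, assuming HAWQLseg and HAWQPoint from this commit are on the classpath:

    import com.pivotal.hawq.mapreduce.HAWQException;
    import com.pivotal.hawq.mapreduce.datatype.HAWQLseg;
    import com.pivotal.hawq.mapreduce.datatype.HAWQPoint;

    public class HAWQLsegDemo
    {
        public static void main(String[] args) throws HAWQException
        {
            // parse the database text representation
            HAWQLseg fromString = new HAWQLseg("[(1.2,1.3),(2.2,2.3)]");
            // or build the segment from coordinates
            HAWQLseg fromCoords = new HAWQLseg(1.2, 1.3, 2.2, 2.3);

            System.out.println(fromString.getPoint1());        // (1.2,1.3)
            System.out.println(fromString.equals(fromCoords)); // true
        }
    }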

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQMacaddr.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQMacaddr.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQMacaddr.java
new file mode 100644
index 0000000..852a7c1
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQMacaddr.java
@@ -0,0 +1,68 @@
+package com.pivotal.hawq.mapreduce.datatype;
+
+import java.util.Arrays;
+
+/**
+ * Store the value of a macaddr in the database
+ */
+public class HAWQMacaddr
+{
+	private byte[] bytes = new byte[6];
+
+	/**
+	 * Initialize a macaddr from a byte array
+	 * 
+	 * @param bytes
+	 *            the byte array to initialize the macaddr from
+	 */
+	public HAWQMacaddr(byte[] bytes)
+	{
+		this(bytes, 0);
+	}
+
+	/**
+	 * Initialize a macaddr from a byte array
+	 * 
+	 * @param bytes
+	 *            the byte array to initialize the macaddr from
+	 * @param offset
+	 *            offset into the byte array
+	 */
+	public HAWQMacaddr(byte[] bytes, int offset)
+	{
+		if (bytes.length - offset >= 6)
+			System.arraycopy(bytes, offset, this.bytes, 0, 6);
+		else
+		{
+			System.arraycopy(bytes, offset, this.bytes, 0, bytes.length
+					- offset);
+			// pad the remainder of this.bytes (not the source array) with zeros
+			for (int i = bytes.length - offset; i < 6; i++)
+				this.bytes[i] = 0;
+		}
+	}
+
+	@Override
+	public boolean equals(Object obj)
+	{
+		if (this == obj)
+			return true;
+		if (obj instanceof HAWQMacaddr)
+			return Arrays.equals(this.bytes, ((HAWQMacaddr) obj).bytes);
+		return false;
+	}
+
+	@Override
+	public String toString()
+	{
+		StringBuffer buffer = new StringBuffer();
+		for (int i = 0; i < 6; ++i)
+		{
+			if (bytes[i] <= 0x0F && bytes[i] >= 0)
+				buffer.append('0');
+			buffer.append(Integer.toHexString(((int) bytes[i]) & 0xFF));
+			if (i != 5)
+				buffer.append(':');
+		}
+		return buffer.toString();
+	}
+}
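
For illustration (not part of the diff above), a brief sketch of how HAWQMacaddr pads a short byte array and formats the address:

    import com.pivotal.hawq.mapreduce.datatype.HAWQMacaddr;

    public class HAWQMacaddrDemo
    {
        public static void main(String[] args)
        {
            // only four bytes supplied; the remaining two are padded with zeros
            byte[] partial = { 0x00, 0x1B, 0x63, (byte) 0x84 };
            HAWQMacaddr addr = new HAWQMacaddr(partial);
            System.out.println(addr); // 00:1b:63:84:00:00
        }
    }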

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQPath.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQPath.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQPath.java
new file mode 100644
index 0000000..dbc76f3
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQPath.java
@@ -0,0 +1,160 @@
+package com.pivotal.hawq.mapreduce.datatype;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import com.pivotal.hawq.mapreduce.HAWQException;
+
+/**
+ * Store the value of a path in the database
+ */
+public class HAWQPath
+{
+	private List<HAWQPoint> points;
+	private boolean open;
+
+	/**
+	 * Initialize a path from a string
+	 * 
+	 * @param value
+	 *            the string to initialize the path from, e.g.
+	 *            [(1.2,1.3),(2.2,2.3)] or ((3.3,3.4),(4.3,4.4),(5.4,5.5)).
+	 *            [...] means the path is open and (...) means the path is
+	 *            closed
+	 * @throws HAWQException
+	 *             when the string is not a valid path
+	 */
+	public HAWQPath(String value) throws HAWQException
+	{
+		boolean open;
+		// assign the result: String.replaceAll does not modify the string in place
+		value = value.replaceAll(" ", "");
+		if (value.startsWith("[") && value.endsWith("]"))
+		{
+			open = true;
+		}
+		else if (value.startsWith("(") && value.endsWith(")"))
+		{
+			open = false;
+		}
+		else
+		{
+			throw new HAWQException("Cannot convert " + value + " to HAWQPath");
+		}
+		String[] pointStrs = value.substring(1, value.length() - 1).split(",");
+		if (pointStrs.length % 2 != 0)
+			throw new HAWQException("Cannot convert " + value + " to HAWQPath");
+
+		ArrayList<HAWQPoint> points = new ArrayList<HAWQPoint>();
+		for (int i = 0; i < pointStrs.length; i += 2)
+		{
+			String pointStr = pointStrs[i] + "," + pointStrs[i + 1];
+			try
+			{
+				points.add(new HAWQPoint(pointStr));
+			}
+			catch (HAWQException e)
+			{
+				throw new HAWQException("Cannot convert " + value
+						+ " to HAWQPath");
+			}
+		}
+		init(open, points);
+	}
+
+	/**
+	 * Initialize a path from points
+	 * 
+	 * @param open
+	 *            whether this path is open
+	 * @param points
+	 *            vertexes of this path
+	 */
+	public HAWQPath(boolean open, List<HAWQPoint> points)
+	{
+		init(open, points);
+	}
+
+	/**
+	 * Initialize a path from points
+	 * 
+	 * @param open
+	 *            whether this path is open
+	 * @param points
+	 *            vertexes of this path
+	 */
+	public HAWQPath(boolean open, HAWQPoint... points)
+	{
+		init(open, Arrays.asList(points));
+	}
+
+	private void init(boolean open, List<HAWQPoint> points)
+	{
+		this.points = points;
+		this.open = open;
+	}
+
+	/**
+	 * Get vertexes of this path
+	 * 
+	 * @return list of vertexes
+	 */
+	public List<HAWQPoint> getPoints()
+	{
+		return points;
+	}
+
+	/**
+	 * Whether this path is open
+	 * 
+	 * @return true for an open path, false for a closed path
+	 */
+	public boolean isOpen()
+	{
+		return open;
+	}
+
+	@Override
+	public boolean equals(Object obj)
+	{
+		if (obj instanceof HAWQPath)
+		{
+			HAWQPath other = (HAWQPath) obj;
+			if (open != other.open)
+				return false;
+
+			if (points.size() != other.getPoints().size())
+				return false;
+
+			for (int i = 0; i < points.size(); i++)
+			{
+				if (!points.get(i).equals(other.getPoints().get(i)))
+					return false;
+			}
+			return true;
+		}
+		return false;
+	}
+
+	@Override
+	public String toString()
+	{
+		StringBuffer buffer = new StringBuffer();
+		if (open)
+			buffer.append('[');
+		else
+			buffer.append('(');
+		int numOfPoints = points.size();
+		for (int i = 0; i < numOfPoints; ++i)
+		{
+			buffer.append(points.get(i));
+			if (i != numOfPoints - 1)
+				buffer.append(',');
+		}
+		if (open)
+			buffer.append(']');
+		else
+			buffer.append(')');
+		return buffer.toString();
+	}
+}
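
For illustration (not part of the diff above), a sketch of the open/closed distinction described in the HAWQPath constructor Javadoc:

    import com.pivotal.hawq.mapreduce.HAWQException;
    import com.pivotal.hawq.mapreduce.datatype.HAWQPath;
    import com.pivotal.hawq.mapreduce.datatype.HAWQPoint;

    public class HAWQPathDemo
    {
        public static void main(String[] args) throws HAWQException
        {
            // square brackets denote an open path
            HAWQPath open = new HAWQPath("[(1.2,1.3),(2.2,2.3)]");

            // parentheses, or open == false, denote a closed path
            HAWQPath closed = new HAWQPath(false, new HAWQPoint(3.3, 3.4),
                    new HAWQPoint(4.3, 4.4), new HAWQPoint(5.4, 5.5));

            System.out.println(open.isOpen()); // true
            System.out.println(closed);        // ((3.3,3.4),(4.3,4.4),(5.4,5.5))
        }
    }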

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQPoint.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQPoint.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQPoint.java
new file mode 100644
index 0000000..b958528
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQPoint.java
@@ -0,0 +1,105 @@
+package com.pivotal.hawq.mapreduce.datatype;
+
+import com.pivotal.hawq.mapreduce.HAWQException;
+
+/**
+ * Store the value of a point in the database
+ */
+public class HAWQPoint
+{
+	private double x;
+	private double y;
+
+	/**
+	 * Initialize a point from a string
+	 * 
+	 * @param value
+	 *            the string to initialize the point from, e.g. (1.2,1.3)
+	 * @throws HAWQException
+	 *             when the string is not a valid point
+	 */
+	public HAWQPoint(String value) throws HAWQException
+	{
+		if (value.startsWith("(") && value.endsWith(")"))
+		{
+			// assign the result: String.replaceAll does not modify the string in place
+			value = value.replaceAll(" ", "");
+			int posOfComma = value.indexOf(',');
+			double x = Double.parseDouble(value.substring(1, posOfComma));
+			double y = Double.parseDouble(value.substring(posOfComma + 1,
+					value.indexOf(')')));
+			init(x, y);
+		}
+		else
+		{
+			throw new HAWQException("Cannot convert " + value + " to HAWQPoint");
+		}
+	}
+
+	/**
+	 * Initialize a point by coordinates
+	 * 
+	 * @param x
+	 *            abscissa of this point
+	 * @param y
+	 *            ordinate of this point
+	 */
+	public HAWQPoint(double x, double y)
+	{
+		init(x, y);
+	}
+
+	private void init(double x, double y)
+	{
+		this.x = x;
+		this.y = y;
+	}
+
+	/**
+	 * Get abscissa
+	 * 
+	 * @return abscissa of this point
+	 */
+	public double getX()
+	{
+		return x;
+	}
+
+	/**
+	 * Get ordinate
+	 * 
+	 * @return ordinate of this point
+	 */
+	public double getY()
+	{
+		return y;
+	}
+
+	@Override
+	public boolean equals(Object obj)
+	{
+		if (obj instanceof HAWQPoint)
+		{
+			HAWQPoint other = (HAWQPoint) obj;
+			return x == other.getX() && y == other.getY();
+		}
+		return false;
+	}
+
+	@Override
+	public String toString()
+	{
+		StringBuffer buffer = new StringBuffer();
+		/*
+		 * GPSQL-936
+		 *
+		 * Remove trailing ".0" for float/double to match DB output
+		 */
+		buffer.append('(')
+			  .append(((Double) x).toString().replaceAll("\\.0$", ""))
+			  .append(',')
+			  .append(((Double) y).toString().replaceAll("\\.0$", ""))
+			  .append(')');
+
+		return buffer.toString();
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQPolygon.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQPolygon.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQPolygon.java
new file mode 100644
index 0000000..b7be5f4
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQPolygon.java
@@ -0,0 +1,155 @@
+package com.pivotal.hawq.mapreduce.datatype;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import com.pivotal.hawq.mapreduce.HAWQException;
+
+/**
+ * Store the value of a polygon in the database
+ */
+public class HAWQPolygon
+{
+	private HAWQBox boundbox;
+	private List<HAWQPoint> points;
+
+	/**
+	 * Initialize a polygon from a string
+	 * 
+	 * @param value
+	 *            the string to initialize the polygon from, e.g.
+	 *            ((3.3,3.4),(4.3,4.4),(5.4,5.5))
+	 * @throws HAWQException
+	 *             when the string is not a valid polygon
+	 */
+	public HAWQPolygon(String value) throws HAWQException
+	{
+		// assign the result: String.replaceAll does not modify the string in place
+		value = value.replaceAll(" ", "");
+		String[] pointStrs = value.substring(1, value.length() - 1).split(",");
+		if (pointStrs.length % 2 != 0)
+			throw new HAWQException("Cannot convert " + value
+					+ " to HAWQPolygon");
+
+		ArrayList<HAWQPoint> points = new ArrayList<HAWQPoint>();
+		for (int i = 0; i < pointStrs.length; i += 2)
+		{
+			String pointStr = pointStrs[i] + "," + pointStrs[i + 1];
+			try
+			{
+				points.add(new HAWQPoint(pointStr));
+			}
+			catch (HAWQException e)
+			{
+				throw new HAWQException("Cannot convert " + value
+						+ " to HAWQPolygon");
+			}
+		}
+		init(points, null);
+	}
+
+	/**
+	 * Initialize a polygon from vertexes and bound box
+	 * 
+	 * @param points
+	 * @param boundbox
+	 */
+	public HAWQPolygon(List<HAWQPoint> points, HAWQBox boundbox)
+	{
+		init(points, boundbox);
+	}
+
+	/**
+	 * Initialize a polygon from vertexes
+	 * 
+	 * @param points
+	 */
+	public HAWQPolygon(List<HAWQPoint> points)
+	{
+		this(points, null);
+	}
+
+	private void init(List<HAWQPoint> points, HAWQBox boundbox)
+	{
+		this.points = points;
+		this.boundbox = boundbox;
+		if (this.boundbox == null)
+			initBoundbox();
+	}
+
+	private void initBoundbox()
+	{
+		int pointsNum = points.size();
+		double minX = Double.MAX_VALUE;
+		double minY = Double.MAX_VALUE;
+		// start the maxima at -Double.MAX_VALUE; Double.MIN_VALUE is the
+		// smallest positive double and would never be below negative coordinates
+		double maxX = -Double.MAX_VALUE;
+		double maxY = -Double.MAX_VALUE;
+		for (int i = 0; i < pointsNum; ++i)
+		{
+			double pointX = points.get(i).getX();
+			double pointY = points.get(i).getY();
+			// check min and max independently so a single point can update both
+			if (pointX < minX)
+				minX = pointX;
+			if (pointX > maxX)
+				maxX = pointX;
+			if (pointY < minY)
+				minY = pointY;
+			if (pointY > maxY)
+				maxY = pointY;
+		}
+		this.boundbox = new HAWQBox(maxX, maxY, minX, minY);
+	}
+
+	/**
+	 * Get bound box of this path
+	 * 
+	 * @return bound box
+	 */
+	public HAWQBox getBoundbox()
+	{
+		return boundbox;
+	}
+
+	/**
+	 * Get vertexes of this path
+	 * 
+	 * @return list of vertexes
+	 */
+	public List<HAWQPoint> getPoints()
+	{
+		return points;
+	}
+
+	@Override
+	public boolean equals(Object obj)
+	{
+		if (obj instanceof HAWQPolygon)
+		{
+			HAWQPolygon other = (HAWQPolygon) obj;
+			if (points.size() != other.getPoints().size())
+				return false;
+
+			for (int i = 0; i < points.size(); i++)
+			{
+				if (!points.get(i).equals(other.getPoints().get(i)))
+					return false;
+			}
+			return boundbox.equals(other.getBoundbox());
+		}
+		return false;
+	}
+
+	@Override
+	public String toString()
+	{
+		StringBuffer buffer = new StringBuffer();
+		buffer.append('(');
+		int numOfPoints = points.size();
+		for (int i = 0; i < numOfPoints; ++i)
+		{
+			buffer.append(points.get(i));
+			if (i != numOfPoints - 1)
+				buffer.append(',');
+		}
+		buffer.append(')');
+		return buffer.toString();
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQVarbit.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQVarbit.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQVarbit.java
new file mode 100644
index 0000000..7bb733c
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/datatype/HAWQVarbit.java
@@ -0,0 +1,106 @@
+package com.pivotal.hawq.mapreduce.datatype;
+
+import com.pivotal.hawq.mapreduce.HAWQException;
+
+/**
+ * Store the value of a varbit/bit in the database
+ */
+public class HAWQVarbit
+{
+	private byte[] bytes = null;
+	private byte[] bytesFromString = null;
+	private int numOfBits = -1;
+
+	/**
+	 * Initialize a varbit from a string
+	 * 
+	 * @param value
+	 *            the string to initialize the varbit from, e.g.
+	 *            10100010001001111
+	 * @throws HAWQException
+	 *             when the value is not a valid varbit
+	 */
+	public HAWQVarbit(String value) throws HAWQException
+	{
+		bytesFromString = new byte[value.length()];
+		for (int i = 0; i < value.length(); i++)
+		{
+			char c = value.charAt(i);
+			if (c != '0' && c != '1')
+				throw new HAWQException("'" + c + "'"
+						+ " is not a valid binary digit");
+			bytesFromString[i] = Byte.parseByte(String.valueOf(c));
+		}
+	}
+
+	/**
+	 * Initialize a varbit from a byte array
+	 * 
+	 * @param bytes
+	 *            the byte array to initialize the varbit from
+	 * @param numOfBits
+	 *            how many bits are in this varbit
+	 */
+	public HAWQVarbit(byte[] bytes, int numOfBits)
+	{
+		this(bytes, 0, numOfBits);
+	}
+
+	/**
+	 * Initialize a varbit from a byte array
+	 * 
+	 * @param bytes
+	 *            the byte array to initialize the varbit from
+	 * @param offset
+	 *            offset in this byte array
+	 * @param numOfBits
+	 *            how many bits are in this varbit
+	 */
+	public HAWQVarbit(byte[] bytes, int offset, int numOfBits)
+	{
+		int length = (numOfBits - 1) / 8 + 1;
+		this.bytes = new byte[length];
+		for (int i = 0; i < length; i++)
+			this.bytes[i] = bytes[offset + i];
+		this.numOfBits = numOfBits;
+	}
+
+	@Override
+	public boolean equals(Object obj)
+	{
+		if (obj instanceof HAWQVarbit)
+			return toString().equals(obj.toString());
+
+		return false;
+	}
+
+	@Override
+	public String toString()
+	{
+		StringBuffer buffer = new StringBuffer();
+		int arraySize;
+		if (bytes != null)
+		{
+			int outBitNum = 0;
+			arraySize = bytes.length;
+			for (int i = 0; i < arraySize; i++)
+			{
+				for (int j = 7; j >= 0; j--)
+				{
+					buffer.append((bytes[i] >> j) & 1);
+					++outBitNum;
+					if (outBitNum == numOfBits)
+						break;
+				}
+			}
+		}
+		else
+		{
+			arraySize = bytesFromString.length;
+			for (int i = 0; i < arraySize; i++)
+				buffer.append(bytesFromString[i]);
+		}
+		return buffer.toString();
+	}
+}
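
For illustration (not part of the diff above), a sketch of the two construction paths for HAWQVarbit, one from a bit string and one from raw bytes plus a bit count:

    import com.pivotal.hawq.mapreduce.HAWQException;
    import com.pivotal.hawq.mapreduce.datatype.HAWQVarbit;

    public class HAWQVarbitDemo
    {
        public static void main(String[] args) throws HAWQException
        {
            // from the text form: every character must be '0' or '1'
            HAWQVarbit fromString = new HAWQVarbit("10100010001001111");

            // from bytes: 0xA2 0x27 = 10100010 00100111, keeping only the first 13 bits
            HAWQVarbit fromBytes = new HAWQVarbit(new byte[] { (byte) 0xA2, 0x27 }, 13);

            System.out.println(fromString); // 10100010001001111
            System.out.println(fromBytes);  // 1010001000100
        }
    }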

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/file/HAWQAOFileStatus.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/file/HAWQAOFileStatus.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/file/HAWQAOFileStatus.java
new file mode 100644
index 0000000..686f03c
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/file/HAWQAOFileStatus.java
@@ -0,0 +1,69 @@
+package com.pivotal.hawq.mapreduce.file;
+
+/**
+ * Extends {@link HAWQFileStatus} and adds attributes specific to
+ * append-only files.
+ */
+public final class HAWQAOFileStatus extends HAWQFileStatus
+{
+	private boolean checksum;
+	private String compressType = null;
+	private int blockSize;
+
+	/**
+	 * Constructor
+	 * 
+	 * @param filePath path
+	 * @param fileLength file length
+	 * @param checksum checksum
+	 * @param compressType compress type, acceptable values are none | zlib | quicklz
+	 * @param blockSize block size
+	 */
+	public HAWQAOFileStatus(String filePath, long fileLength, boolean checksum,
+			String compressType, int blockSize)
+	{
+		super(filePath, fileLength);
+		this.checksum = checksum;
+		this.compressType = compressType;
+		this.blockSize = blockSize;
+
+		if (!("none".equals(compressType) || "zlib".equals(compressType) || "quicklz".equals(compressType)))
+			throw new UnsupportedOperationException("Compress type " + compressType + " is not supported");
+	}
+
+	/**
+	 * Get checksum of this file
+	 * 
+	 * @return checksum of this file
+	 */
+	public boolean getChecksum()
+	{
+		return checksum;
+	}
+
+	/**
+	 * Get compress type of this file
+	 * 
+	 * @return compress type of this file
+	 */
+	public String getCompressType()
+	{
+		return compressType;
+	}
+
+	/**
+	 * Get ao block size of this file
+	 * 
+	 * @return block size of this file
+	 */
+	public int getBlockSize()
+	{
+		return blockSize;
+	}
+
+	@Override
+	public String toString()
+	{
+		return filePath + "|" + fileLength + "|" + checksum + "|" + compressType + "|" + blockSize;
+	}
+}
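
For illustration (not part of the diff above), a small sketch of constructing an HAWQAOFileStatus; the segment file path below is purely hypothetical:

    import com.pivotal.hawq.mapreduce.file.HAWQAOFileStatus;

    public class HAWQAOFileStatusDemo
    {
        public static void main(String[] args)
        {
            // hypothetical data file path, length, checksum flag, compress type and AO block size
            HAWQAOFileStatus status = new HAWQAOFileStatus(
                    "/gpseg0/16385/16386/17001/1", 2048L, true, "none", 32768);

            // toString() joins the fields with '|':
            // /gpseg0/16385/16386/17001/1|2048|true|none|32768
            System.out.println(status);

            // any compress type other than none, zlib or quicklz makes the
            // constructor throw UnsupportedOperationException
        }
    }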

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/file/HAWQFileStatus.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/file/HAWQFileStatus.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/file/HAWQFileStatus.java
new file mode 100644
index 0000000..2683e1b
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/file/HAWQFileStatus.java
@@ -0,0 +1,36 @@
+package com.pivotal.hawq.mapreduce.file;
+
+/**
+ * This class describes the status of a file in the database.
+ */
+public class HAWQFileStatus
+{
+	protected String filePath = null;
+	protected long fileLength;
+
+	public HAWQFileStatus(String filePath, long fileLength) {
+		this.filePath = filePath;
+		this.fileLength = fileLength;
+	}
+
+	/**
+	 * Get path string of this file
+	 * 
+	 * @return path
+	 */
+	public String getFilePath()
+	{
+		return filePath;
+	}
+
+	/**
+	 * Get file length of this file
+	 * 
+	 * @return file length
+	 */
+	public long getFileLength()
+	{
+		return fileLength;
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/HAWQAOTableMetadata.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/HAWQAOTableMetadata.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/HAWQAOTableMetadata.java
new file mode 100644
index 0000000..4789c5d
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/HAWQAOTableMetadata.java
@@ -0,0 +1,41 @@
+package com.pivotal.hawq.mapreduce.metadata;
+
+import com.pivotal.hawq.mapreduce.file.HAWQAOFileStatus;
+import com.pivotal.hawq.mapreduce.schema.HAWQSchema;
+
+/**
+ * Represents metadata for a HAWQ append-only, row-oriented table.
+ */
+public class HAWQAOTableMetadata {
+	private HAWQDatabaseMetadata dbMetadata;
+	private HAWQSchema schema;
+	private HAWQAOFileStatus[] fileStatuses;
+
+	public HAWQAOTableMetadata(HAWQDatabaseMetadata dbMetadata,
+							   HAWQSchema schema,
+							   HAWQAOFileStatus[] fileStatuses) {
+		this.dbMetadata = dbMetadata;
+		this.schema = schema;
+		this.fileStatuses = fileStatuses;
+	}
+
+	public String getDatabaseVersion() {
+		return dbMetadata.getVersion();
+	}
+
+	public String getDatabaseEncoding() {
+		return dbMetadata.getEncoding();
+	}
+
+	public String getDatabaseDfsURL() {
+		return dbMetadata.getDfsURL();
+	}
+
+	public HAWQSchema getSchema() {
+		return schema;
+	}
+
+	public HAWQAOFileStatus[] getFileStatuses() {
+		return fileStatuses;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/HAWQDatabaseMetadata.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/HAWQDatabaseMetadata.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/HAWQDatabaseMetadata.java
new file mode 100644
index 0000000..89ba652
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/HAWQDatabaseMetadata.java
@@ -0,0 +1,28 @@
+package com.pivotal.hawq.mapreduce.metadata;
+
+/**
+ * Represents common information about a HAWQ database.
+ */
+class HAWQDatabaseMetadata {
+	protected String version;
+	protected String encoding;
+	protected String dfsURL;
+
+	public HAWQDatabaseMetadata(String version, String encoding, String dfsURL) {
+		this.version = version;
+		this.encoding = encoding;
+		this.dfsURL = dfsURL;
+	}
+
+	public String getVersion() {
+		return version;
+	}
+
+	public String getEncoding() {
+		return encoding;
+	}
+
+	public String getDfsURL() {
+		return dfsURL;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/HAWQParquetTableMetadata.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/HAWQParquetTableMetadata.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/HAWQParquetTableMetadata.java
new file mode 100644
index 0000000..8b971f9
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/HAWQParquetTableMetadata.java
@@ -0,0 +1,33 @@
+package com.pivotal.hawq.mapreduce.metadata;
+
+import com.pivotal.hawq.mapreduce.file.HAWQFileStatus;
+
+/**
+ * Represents metadata for a HAWQ Parquet table.
+ */
+public class HAWQParquetTableMetadata {
+	private HAWQDatabaseMetadata dbMetadata;
+	private HAWQFileStatus[] fileStatuses;
+
+	public HAWQParquetTableMetadata(HAWQDatabaseMetadata dbMetadata,
+									HAWQFileStatus[] fileStatuses) {
+		this.dbMetadata = dbMetadata;
+		this.fileStatuses = fileStatuses;
+	}
+
+	public String getDatabaseVersion() {
+		return dbMetadata.getVersion();
+	}
+
+	public String getDatabaseEncoding() {
+		return dbMetadata.getEncoding();
+	}
+
+	public String getDatabaseDfsURL() {
+		return dbMetadata.getDfsURL();
+	}
+
+	public HAWQFileStatus[] getFileStatuses() {
+		return fileStatuses;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/HAWQTableFormat.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/HAWQTableFormat.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/HAWQTableFormat.java
new file mode 100644
index 0000000..440f1ce
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/HAWQTableFormat.java
@@ -0,0 +1,21 @@
+package com.pivotal.hawq.mapreduce.metadata;
+
+/**
+ * Different storage formats for tables in HAWQ.
+ */
+public enum HAWQTableFormat {
+	AO {
+		public String getOrientation() { return "row"; }
+	},
+	CO {
+		public String getOrientation() { return "column"; }
+	},
+	Parquet {
+		public String getOrientation() { return "parquet"; }
+	},
+	Other;
+
+	public String getOrientation() {
+		throw new UnsupportedOperationException();
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/MetadataAccessException.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/MetadataAccessException.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/MetadataAccessException.java
new file mode 100644
index 0000000..0b4e8c9
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/MetadataAccessException.java
@@ -0,0 +1,19 @@
+package com.pivotal.hawq.mapreduce.metadata;
+
+/**
+ * Thrown to indicate that something went wrong while extracting a table's metadata.
+ */
+public class MetadataAccessException extends RuntimeException {
+
+	public MetadataAccessException(String message) {
+		super(message);
+	}
+
+	public MetadataAccessException(String message, Throwable cause) {
+		super(message, cause);
+	}
+
+	public MetadataAccessException(Throwable cause) {
+		super(cause);
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/MetadataAccessor.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/MetadataAccessor.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/MetadataAccessor.java
new file mode 100644
index 0000000..f553dce
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/MetadataAccessor.java
@@ -0,0 +1,98 @@
+package com.pivotal.hawq.mapreduce.metadata;
+
+import com.pivotal.hawq.mapreduce.util.HAWQJdbcUtils;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+
+/**
+ * Convenience class for extracting a table's metadata from HAWQ. This class is
+ * used internally by HAWQInputFormat and usually does not need to be used directly.
+ * It is available in the public API for users who want to use HAWQAOInputFormat and
+ * HAWQParquetInputFormat directly.
+ * <p>
+ *
+ * Metadata can be extracted in two ways:
+ * <p>
+ * <ul>
+ *     <li>{@code MetadataAccessor.newInstanceUsingJDBC(...)} connects to a running
+ *     	   HAWQ instance and reads metadata from the system catalog.</li>
+ *     <li>{@code MetadataAccessor.newInstanceUsingFile(...)} loads metadata from a
+ *         file dumped in advance with the <i>gpextract</i> utility.</li>
+ * </ul>
+ *
+ * Either way, you can then call {@code getAOMetadata} or {@code getParquetMetadata}
+ * according to the actual table format indicated by {@code getTableFormat}. Only
+ * AO and Parquet tables are supported for now.
+ */
+public abstract class MetadataAccessor {
+
+	/**
+	 * Get the storage format of the input table.
+	 * @return the storage format of the input table
+	 */
+	public abstract HAWQTableFormat getTableFormat();
+
+	/**
+	 * Get metadata for an AO table. If the input table is not AO,
+	 * an IllegalStateException is thrown.
+	 * @return metadata of the AO table
+	 */
+	public abstract HAWQAOTableMetadata getAOMetadata();
+
+	/**
+	 * Get metadata for a Parquet table. If the input table is not Parquet,
+	 * an IllegalStateException is thrown.
+	 * @return metadata of the Parquet table
+	 */
+	public abstract HAWQParquetTableMetadata getParquetMetadata();
+
+	/**
+	 * Connect to the database and extract table metadata using JDBC. Only AO and Parquet tables are supported.
+	 * @param dbUrl		database connection string in form 'host:port/dbname'
+	 * @param username	name of the database user
+	 * @param password	password of the database user
+	 * @param tableName	name of an AO or Parquet table; valid formats are 'namespace_name.table_name' or
+	 *                  simply 'table_name' when using the default 'public' namespace
+	 * @return MetadataAccessor instance with input table's metadata
+	 * @throws MetadataAccessException if failed to extract metadata
+	 */
+	public static MetadataAccessor newInstanceUsingJDBC(
+			String dbUrl,
+			String username,
+			String password,
+			String tableName) {
+
+		Connection conn = null;
+		try {
+			conn = HAWQJdbcUtils.getConnection("jdbc:postgresql://" + dbUrl, username, password);
+			return new MetadataSQLAccessor(conn, tableName);
+
+		} catch (Exception e) {
+			throw new MetadataAccessException("failed to extract metadata!", e);
+		} finally {
+			try {
+				HAWQJdbcUtils.closeConnection(conn);
+			} catch (SQLException e) {
+				throw new MetadataAccessException("failed to close Connection!", e);
+			}
+		}
+	}
+
+	/**
+	 * Read a table's metadata file generated by the gpextract utility.
+	 * @param file Path to the metadata file
+	 * @return MetadataAccessor instance with input table's metadata
+	 * @throws MetadataAccessException  if failed to extract metadata
+	 */
+	public static MetadataAccessor newInstanceUsingFile(String file) {
+
+		try {
+			return new MetadataYAMLAccessor(file);
+
+		} catch (Exception e) {
+			throw new MetadataAccessException("error occurred when reading " + file, e);
+		}
+	}
+}
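
For illustration (not part of the diff above), a usage sketch of the JDBC path described in the MetadataAccessor Javadoc; the connection details and table name are placeholders:

    import com.pivotal.hawq.mapreduce.file.HAWQAOFileStatus;
    import com.pivotal.hawq.mapreduce.metadata.HAWQAOTableMetadata;
    import com.pivotal.hawq.mapreduce.metadata.HAWQTableFormat;
    import com.pivotal.hawq.mapreduce.metadata.MetadataAccessor;

    public class MetadataAccessorDemo
    {
        public static void main(String[] args)
        {
            // placeholder connection info: 'host:port/dbname', user, password, table
            MetadataAccessor accessor = MetadataAccessor.newInstanceUsingJDBC(
                    "localhost:5432/postgres", "gpadmin", "", "public.my_ao_table");

            // alternatively, load a file produced by gpextract:
            // MetadataAccessor accessor = MetadataAccessor.newInstanceUsingFile("my_table.yaml");

            // check the format before asking for format-specific metadata
            if (accessor.getTableFormat() == HAWQTableFormat.AO)
            {
                HAWQAOTableMetadata metadata = accessor.getAOMetadata();
                System.out.println(metadata.getDatabaseVersion());
                for (HAWQAOFileStatus fs : metadata.getFileStatuses())
                    System.out.println(fs);
            }
        }
    }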

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/MetadataSQLAccessor.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/MetadataSQLAccessor.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/MetadataSQLAccessor.java
new file mode 100644
index 0000000..3f539c8
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/MetadataSQLAccessor.java
@@ -0,0 +1,394 @@
+package com.pivotal.hawq.mapreduce.metadata;
+
+import com.google.common.collect.Lists;
+import com.pivotal.hawq.mapreduce.HAWQException;
+import com.pivotal.hawq.mapreduce.file.HAWQAOFileStatus;
+import com.pivotal.hawq.mapreduce.file.HAWQFileStatus;
+import com.pivotal.hawq.mapreduce.schema.HAWQField;
+import com.pivotal.hawq.mapreduce.schema.HAWQPrimitiveField;
+import com.pivotal.hawq.mapreduce.schema.HAWQSchema;
+import com.pivotal.hawq.mapreduce.util.HAWQJdbcUtils;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.sql.*;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Accessor that extracts table metadata by querying the catalog using JDBC.
+ *
+ * Extraction is done during construction.
+ */
+class MetadataSQLAccessor extends MetadataAccessor {
+	private Connection conn;
+
+	private int 	dbId;
+	private int 	dbTblSpcId;
+	private HAWQDatabaseMetadata dbMetadata;
+
+	/**
+	 *  DFS location (excluding DFS_PREFIX) for each segment, indexed by contentid,
+	 *  eg: ['/gpseg0', '/gpseg1']
+	 */
+	private String segmentLocation;
+
+	private HAWQTableFormat tableFormat;
+	private HAWQAOTableMetadata aoMetadata;
+	private HAWQParquetTableMetadata parquetMetadata;
+
+
+	private static class RelationInfo {
+		String 			relname;
+		int 			relid;
+		int 			relfilenode;
+		HAWQTableFormat relformat;
+
+		List<RelationInfo> partitions;
+
+		private RelationInfo(Map<String, String> relPGClass) {
+			this.relname		= relPGClass.get("relname");
+			this.relid			= Integer.parseInt(relPGClass.get("oid"));
+			this.relfilenode	= Integer.parseInt(relPGClass.get("relfilenode"));
+			this.relformat		= getStorageFormat(relPGClass.get("relstorage"));
+			this.partitions		= Lists.newArrayList();
+
+			if (relformat != HAWQTableFormat.AO && relformat != HAWQTableFormat.Parquet) {
+				throw new IllegalArgumentException(String.format(
+						"Unsupported storage format for table '%s', only AO and Parquet are supported.",
+						relname));
+			}
+ 		}
+
+		/**
+		 * Fetch relation information about the given table. Only AO or Parquet tables are supported.
+		 * One-level partition tables are also supported if the parent table and all subpartition tables
+		 * have the same storage format, eg: both AO or both Parquet.
+		 * @param conn		database connection to use
+		 * @param tableName name of an AO/Parquet table; valid formats are 'namespace_name.table_name' or
+		 *                  simply 'table_name' when using the default 'public' namespace
+		 * @return table's information
+		 * @throws IllegalArgumentException if `tableName` is not a valid table name
+		 * @throws SQLException if an error occurs when querying the database.
+		 */
+		public static RelationInfo newInstanceFromDB(
+				Connection conn, String tableName) throws SQLException {
+
+			String relnamespace = "public";
+			String relname;
+
+			String[] nameParts = tableName.split("\\.");
+			if (nameParts.length == 1) {
+				relname = nameParts[0];
+
+			} else if (nameParts.length == 2) {
+				relnamespace = nameParts[0];
+				relname = nameParts[1];
+
+			} else {
+				throw new IllegalArgumentException("not a valid table name: " + tableName);
+			}
+
+			// get pg_class information for the target relation itself
+			Map<String, String> relPGClass = getPGClass(conn, relnamespace, relname);
+			if (relPGClass == null)
+				throw new IllegalArgumentException("table '" + tableName + "' does not exist!");
+
+			RelationInfo rel = new RelationInfo(relPGClass);
+
+			// get pg_class information for all the possible subpartition tables
+			for (Map<String, String> p : getPartitions(conn, relnamespace, relname)) {
+				if (p.get("parentpartitiontablename") != null) {
+					throw new IllegalArgumentException("multi-level partition table is not supported!");
+				}
+
+				relnamespace = p.get("partitionschemaname");
+				relname = p.get("partitiontablename");
+
+				RelationInfo partRel = new RelationInfo(getPGClass(conn, relnamespace, relname));
+				if (partRel.relformat != rel.relformat) {
+					throw new IllegalArgumentException(
+							String.format("partition table storage format inconsistent: %s is %s, but %s is %s",
+									rel.relname, rel.relformat, partRel.relname, partRel.relformat));
+				}
+
+				rel.partitions.add(partRel);
+			}
+
+			return rel;
+		}
+
+		private static Map<String, String> getPGClass(
+				Connection conn, String relnamespace, String relname) throws SQLException {
+			String sql = String.format(
+					"SELECT c.oid, c.*\n" +
+					"FROM pg_class c JOIN pg_namespace n ON c.relnamespace=n.oid\n" +
+					"WHERE n.nspname='%s' and c.relname='%s' and relkind='r'",
+					relnamespace, relname);
+			return HAWQJdbcUtils.executeSafeQueryForSingleRow(conn, sql);
+		}
+
+		private static HAWQTableFormat getStorageFormat(String relstorage) {
+			assert relstorage.length() == 1;
+			switch (relstorage.charAt(0)) {
+				case 'a':
+					return HAWQTableFormat.AO;
+				case 'c':
+					return HAWQTableFormat.CO;
+				case 'p':
+					return HAWQTableFormat.Parquet;
+				default:
+					return HAWQTableFormat.Other;
+			}
+		}
+
+		private static List<Map<String, String>> getPartitions(
+				Connection conn, String relnamespace, String relname) throws SQLException {
+			String sql = String.format(
+					"SELECT partitionschemaname, partitiontablename, partitionname,\n" +
+					"       partitiontype, parentpartitiontablename\n" +
+					"FROM pg_partitions\n" +
+					"WHERE schemaname='%s' and tablename='%s'",
+					relnamespace, relname);
+			return HAWQJdbcUtils.executeSafeQuery(conn, sql);
+		}
+	}
+
+	protected MetadataSQLAccessor(Connection conn, String tableName) throws SQLException {
+		this.conn = conn;
+		this.disableORCA();
+		this.extractGeneralDBInfo();
+
+		RelationInfo relation = RelationInfo.newInstanceFromDB(conn, tableName.toLowerCase());
+		this.tableFormat = relation.relformat;
+
+		switch (tableFormat) {
+			case AO:
+				aoMetadata = extractAOMetadata(relation);
+				break;
+			case Parquet:
+				parquetMetadata = extractParquetMetadata(relation);
+				break;
+			default:
+				throw new AssertionError(tableFormat);
+		}
+	}
+
+	private void disableORCA() throws SQLException {
+		Statement stmt = null;
+		try {
+			stmt = conn.createStatement();
+			stmt.execute("set optimizer=off;");
+
+		} finally {
+			HAWQJdbcUtils.free(stmt, null);
+		}
+	}
+
+	private void extractGeneralDBInfo() throws SQLException {
+		String sql = "SELECT oid, dat2tablespace, pg_encoding_to_char(encoding) encoding\n" +
+				"FROM pg_database WHERE datname=current_database()";
+		Map<String, String> res = HAWQJdbcUtils.executeSafeQueryForSingleRow(conn, sql);
+
+		dbId 		= Integer.parseInt(res.get("oid"));
+		dbTblSpcId	= Integer.parseInt(res.get("dat2tablespace"));
+		String encoding	= res.get("encoding");
+
+		sql = "select version() as version";
+		res = HAWQJdbcUtils.executeSafeQueryForSingleRow(conn, sql);
+		String version	= res.get("version");
+
+		sql = "SELECT fselocation\n" +
+				"FROM pg_filespace_entry\n" +
+				"JOIN pg_filespace fs ON fsefsoid=fs.oid\n" +
+				"WHERE fsname='dfs_system';";
+
+		List<Map<String, String>> rows = HAWQJdbcUtils.executeSafeQuery(conn, sql);
+
+		if (rows == null || rows.size() != 1)
+		{
+			HAWQException e = new HAWQException("Could NOT find the appropriate filespace entry", 0);
+			throw new AssertionError("failed to get table data file path prefix!", e);
+		}
+		
+		String dfsURL = null;
+	
+		Map<String, String> row = rows.get(0);
+		try {
+			URI uri = new URI(row.get("fselocation"));
+			dfsURL = String.format("%s://%s:%d", uri.getScheme(), uri.getHost(), uri.getPort());
+			this.segmentLocation = uri.getPath();
+		} catch(URISyntaxException e) {
+			throw new AssertionError("failed to parse segment locations!", e);
+		}
+		
+		dbMetadata = new HAWQDatabaseMetadata(version, encoding, dfsURL);
+	}
+
+	//------------------------------------------------------------
+	//---- extract AO table info
+	//------------------------------------------------------------
+
+	private HAWQAOTableMetadata extractAOMetadata(RelationInfo relation) throws SQLException {
+		HAWQSchema schema = extractAOSchema(relation);
+		HAWQAOFileStatus[] fileStatuses = extractAOFileStatuses(relation);
+		return new HAWQAOTableMetadata(dbMetadata, schema, fileStatuses);
+	}
+
+	private HAWQSchema extractAOSchema(RelationInfo rel) throws SQLException {
+		String sql = String.format(
+				"SELECT attname as name, typname as type\n" +
+						"FROM pg_attribute a join pg_type t on a.atttypid = t.oid\n" +
+						"WHERE attrelid=%d and attnum > 0\n" +
+						"ORDER BY attnum asc;", rel.relid);
+		List<Map<String, String>> rows = HAWQJdbcUtils.executeSafeQuery(conn, sql);
+		List<HAWQField> fields = Lists.newArrayList();
+
+		for (Map<String, String> row : rows) {
+			String fieldName = row.get("name");
+			String type = row.get("type");
+			HAWQField hawqField;
+			if (type.startsWith("_")) {
+				// supported array type
+				if (type.equals("_int4")
+						|| type.equals("_int8")
+						|| type.equals("_int2")
+						|| type.equals("_float4")
+						|| type.equals("_float8")
+						|| type.equals("_bool")
+						|| type.equals("_time")
+						|| type.equals("_date")
+						|| type.equals("_interval")) {
+
+					hawqField = HAWQSchema.optional_field_array(
+							HAWQPrimitiveField.PrimitiveType.valueOf(type.substring(1).toUpperCase()),
+							fieldName
+					);
+
+				} else {
+					throw new MetadataAccessException(
+							"unsupported array type " + type + " for field " + fieldName);
+				}
+
+			} else {
+				hawqField = HAWQSchema.optional_field(
+						HAWQPrimitiveField.PrimitiveType.valueOf(type.toUpperCase()),
+						fieldName
+				);
+			}
+
+			fields.add(hawqField);
+		}
+
+		return new HAWQSchema(rel.relname, fields);
+	}
+
+	private HAWQAOFileStatus[] extractAOFileStatuses(
+			RelationInfo relation) throws SQLException {
+		List<HAWQAOFileStatus> fileStatuses = Lists.newArrayList();
+
+		fileStatuses.addAll(loadAOFileStatuses(relation.relid, relation.relfilenode));
+		for (RelationInfo partRel : relation.partitions) {
+			fileStatuses.addAll(loadAOFileStatuses(partRel.relid, partRel.relfilenode));
+		}
+
+		return fileStatuses.toArray(new HAWQAOFileStatus[fileStatuses.size()]);
+	}
+
+	private List<HAWQAOFileStatus> loadAOFileStatuses(
+			int relid, int relfilenode) throws SQLException {
+		List<HAWQAOFileStatus> fileStatuses = Lists.newArrayList();
+
+		String sql = "SELECT blocksize, compresslevel, checksum, compresstype\n" +
+				"FROM pg_appendonly WHERE relid=" + relid;
+		Map<String, String> relAppendonly = HAWQJdbcUtils.executeSafeQueryForSingleRow(conn, sql);
+
+		int blockSize = Integer.parseInt(relAppendonly.get("blocksize"));
+		String compressType = relAppendonly.get("compresstype");
+		if (compressType == null) {
+			compressType = "none";
+		}
+		boolean checksum = relAppendonly.get("checksum").equals("t");
+
+		sql = String.format(
+				"SELECT segno as fileno, eof as filesize\n" +
+				"FROM pg_aoseg.pg_aoseg_%d\n" +
+				"ORDER by fileno;", relid);
+		List<Map<String, String>> rows = HAWQJdbcUtils.executeSafeQuery(conn, sql);
+
+		for (Map<String, String> row : rows) {
+			String filePath = String.format("%s/%d/%d/%d/%d",
+					segmentLocation,
+					dbTblSpcId,
+					dbId,
+					relfilenode,
+					Integer.parseInt(row.get("fileno")));
+
+			long fileSize = Long.parseLong(row.get("filesize"));
+			fileStatuses.add(new HAWQAOFileStatus(filePath, fileSize, checksum, compressType, blockSize));
+		}
+
+		return fileStatuses;
+	}
+
+	//------------------------------------------------------------
+	//---- extract Parquet table info
+	//------------------------------------------------------------
+
+	private HAWQParquetTableMetadata extractParquetMetadata(
+			RelationInfo relation) throws SQLException {
+		List<HAWQFileStatus> fileStatuses = Lists.newArrayList();
+
+		fileStatuses.addAll(extractParquetFileStatuses(relation.relid, relation.relfilenode));
+		for (RelationInfo partRel : relation.partitions) {
+			fileStatuses.addAll(extractParquetFileStatuses(partRel.relid, partRel.relfilenode));
+		}
+
+		return new HAWQParquetTableMetadata(
+				dbMetadata, fileStatuses.toArray(new HAWQFileStatus[fileStatuses.size()]));
+	}
+
+	private List<HAWQFileStatus> extractParquetFileStatuses(
+			int oid, int relfilenode) throws SQLException {
+
+		List<HAWQFileStatus> fileStatuses = Lists.newArrayList();
+
+		String sql = String.format(
+				"SELECT segno as fileno, eof as filesize\n" +
+				"FROM pg_aoseg.pg_paqseg_%d\n" +
+				"ORDER by fileno;", oid);
+		List<Map<String, String>> rows = HAWQJdbcUtils.executeSafeQuery(conn, sql);
+
+		for (Map<String, String> row : rows) {
+			String filePath = String.format("%s/%d/%d/%d/%d",
+					segmentLocation,
+					dbTblSpcId,
+					dbId,
+					relfilenode,
+					Integer.parseInt(row.get("fileno")));
+
+			fileStatuses.add(new HAWQFileStatus(filePath, Long.parseLong(row.get("filesize"))));
+		}
+
+		return fileStatuses;
+	}
+
+	@Override
+	public HAWQTableFormat getTableFormat() {
+		return tableFormat;
+	}
+
+	@Override
+	public HAWQAOTableMetadata getAOMetadata() {
+		if (tableFormat != HAWQTableFormat.AO)
+			throw new IllegalStateException("shouldn't call getAOMetadata on a " + tableFormat + " table!");
+		return aoMetadata;
+	}
+
+	@Override
+	public HAWQParquetTableMetadata getParquetMetadata() {
+		if (tableFormat != HAWQTableFormat.Parquet)
+			throw new IllegalStateException("shouldn't call getParquetMetadata on a " + tableFormat + " table!");
+		return parquetMetadata;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/MetadataYAMLAccessor.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/MetadataYAMLAccessor.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/MetadataYAMLAccessor.java
new file mode 100644
index 0000000..2006299
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/metadata/MetadataYAMLAccessor.java
@@ -0,0 +1,204 @@
+package com.pivotal.hawq.mapreduce.metadata;
+
+import com.pivotal.hawq.mapreduce.file.HAWQAOFileStatus;
+import com.pivotal.hawq.mapreduce.file.HAWQFileStatus;
+import com.pivotal.hawq.mapreduce.schema.HAWQField;
+import com.pivotal.hawq.mapreduce.schema.HAWQPrimitiveField;
+import com.pivotal.hawq.mapreduce.schema.HAWQSchema;
+import org.yaml.snakeyaml.Yaml;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Accessor that reads metadata from a YAML-formatted file
+ * generated by the gpextract utility.
+ */
+class MetadataYAMLAccessor extends MetadataAccessor {
+	private HAWQTableFormat tableFormat;
+	private String tableName;
+
+	private HAWQAOTableMetadata aoMetadata;
+	private HAWQParquetTableMetadata parquetMetadata;
+
+	protected MetadataYAMLAccessor(String file) throws FileNotFoundException {
+		InputStream input = new FileInputStream(new File(file));
+		Yaml yaml = new Yaml();
+
+		Map<?, ?> metadata = (Map<?, ?>) yaml.load(input);
+
+		String dbVersion = metadata.get("DBVersion").toString();
+		String dbEncoding = metadata.get("Encoding").toString();
+		String dbDfsURL = metadata.get("DFS_URL").toString();
+		HAWQDatabaseMetadata dbMetadata = new HAWQDatabaseMetadata(dbVersion, dbEncoding, dbDfsURL);
+
+		tableName = metadata.get("TableName").toString();
+
+		String format = metadata.get("FileFormat").toString();
+		tableFormat = HAWQTableFormat.valueOf(format);
+
+		switch (tableFormat) {
+			case AO:
+				aoMetadata = extractAOMetadata(dbMetadata, metadata);
+				break;
+			case Parquet:
+				parquetMetadata = extractParquetMetadata(dbMetadata, metadata);
+				break;
+			default:
+				throw new UnsupportedOperationException(tableFormat + " table is not supported!");
+		}
+	}
+
+	//------------------------------------------------------------
+	//---- extract AO table info
+	//------------------------------------------------------------
+
+	private HAWQAOTableMetadata extractAOMetadata(HAWQDatabaseMetadata dbMetadata, Map<?,?> metadata) {
+		HAWQSchema schema = extractAOSchema((List<?>) metadata.get("AO_Schema"));
+		HAWQAOFileStatus[] fileStatuses = extractAOFileStatuses((Map<?, ?>) metadata.get("AO_FileLocations"));
+		return new HAWQAOTableMetadata(dbMetadata, schema, fileStatuses);
+	}
+
+	private HAWQSchema extractAOSchema(List<?> ao_schema) {
+		List<HAWQField> hawqFields = new ArrayList<HAWQField>();
+
+		for (int i = 0; i < ao_schema.size(); ++i) {
+			Map<?, ?> field = (Map<?, ?>) ao_schema.get(i);
+			String fieldName = field.get("name").toString();
+			String type = field.get("type").toString();
+			HAWQField hawqField = null;
+			// TODO move this logic to AOInputFormat
+			if (type.startsWith("_")) {
+				// supported array type
+				if (type.equals("_int4")
+						|| type.equals("_int8")
+						|| type.equals("_int2")
+						|| type.equals("_float4")
+						|| type.equals("_float8")
+						|| type.equals("_bool")
+						|| type.equals("_time")
+						|| type.equals("_date")
+						|| type.equals("_interval")) {
+
+					hawqField = HAWQSchema.optional_field_array(
+							HAWQPrimitiveField.PrimitiveType.valueOf(type.substring(1).toUpperCase()),
+							fieldName
+					);
+
+				} else {
+					throw new UnsupportedOperationException(
+							"unsupported array type " + type + " for field " + fieldName);
+				}
+
+			} else {
+				hawqField = HAWQSchema.optional_field(
+						HAWQPrimitiveField.PrimitiveType.valueOf(type.toUpperCase()),
+						fieldName
+				);
+			}
+
+			assert hawqField != null;
+			hawqFields.add(hawqField);
+		}
+
+		return new HAWQSchema(tableName, hawqFields);
+	}
+
+	private HAWQAOFileStatus[] extractAOFileStatuses(Map<?,?> ao_fileLocations) {
+		List<HAWQAOFileStatus> result = new ArrayList<HAWQAOFileStatus>();
+
+		result.addAll(loadAOFileStatuses(ao_fileLocations));
+
+		List<?> partitions = (List<?>) ao_fileLocations.get("Partitions");
+		if (partitions != null) {
+			for (int i = 0; i < partitions.size(); ++i) {
+				result.addAll(loadAOFileStatuses((Map<?, ?>) partitions.get(i)));
+			}
+		}
+
+		return result.toArray(new HAWQAOFileStatus[result.size()]);
+	}
+
+	private List<HAWQAOFileStatus> loadAOFileStatuses(Map<?, ?> fileLocations) {
+		List<HAWQAOFileStatus> fileStatuses = new ArrayList<HAWQAOFileStatus>();
+
+		int blockSize = Integer.parseInt(fileLocations.get("Blocksize").toString());
+		boolean checksum = Boolean.parseBoolean(fileLocations.get("Checksum").toString());
+		String compressType = fileLocations.get("CompressionType") == null ?
+				"none" : fileLocations.get("CompressionType").toString().toLowerCase();
+
+		List<?> files = (List<?>) fileLocations.get("Files");
+
+		for (int i = 0; i < files.size(); ++i) {
+			Map<?, ?> file = (Map<?, ?>) files.get(i);
+			String filePath = file.get("path").toString();
+			long fileLen = Long.parseLong(file.get("size").toString());
+
+			fileStatuses.add(new HAWQAOFileStatus(filePath, fileLen, checksum, compressType, blockSize));
+		}
+
+		return fileStatuses;
+	}
+
+	//------------------------------------------------------------
+	//---- extract Parquet table info
+	//------------------------------------------------------------
+
+	private HAWQParquetTableMetadata extractParquetMetadata(HAWQDatabaseMetadata dbMetadata, Map<?, ?> metadata) {
+		return new HAWQParquetTableMetadata(
+				dbMetadata,
+				extractParquetFileStatuses((Map<?, ?>) metadata.get("Parquet_FileLocations")));
+	}
+
+	private HAWQFileStatus[] extractParquetFileStatuses(Map<?, ?> parquet_fileLocations) {
+		List<HAWQFileStatus> result = new ArrayList<HAWQFileStatus>();
+
+		result.addAll(loadParquetFileStatuses(parquet_fileLocations));
+
+		List<?> partitions = (List<?>) parquet_fileLocations.get("Partitions");
+		if (partitions != null) {
+			for (int i = 0; i < partitions.size(); ++i) {
+				result.addAll(loadParquetFileStatuses((Map<?, ?>) partitions.get(i)));
+			}
+		}
+
+		return result.toArray(new HAWQFileStatus[result.size()]);
+	}
+
+	private List<HAWQFileStatus> loadParquetFileStatuses(Map<?,?> fileLocations) {
+		List<HAWQFileStatus> fileStatuses = new ArrayList<HAWQFileStatus>();
+
+		List<?> files = (List<?>) fileLocations.get("Files");
+		for (int i = 0; i < files.size(); ++i) {
+			Map<?, ?> file = (Map<?, ?>) files.get(i);
+			final String filePath = file.get("path").toString();
+			final long fileLen = Long.parseLong(file.get("size").toString());
+			fileStatuses.add(new HAWQFileStatus(filePath, fileLen));
+		}
+		return fileStatuses;
+	}
+
+	@Override
+	public HAWQTableFormat getTableFormat() {
+		return tableFormat;
+	}
+
+	@Override
+	public HAWQAOTableMetadata getAOMetadata() {
+		if (tableFormat != HAWQTableFormat.AO)
+			throw new IllegalStateException("shouldn't call getAOMetadata on a " + tableFormat + " table!");
+		return aoMetadata;
+	}
+
+	@Override
+	public HAWQParquetTableMetadata getParquetMetadata() {
+		if (tableFormat != HAWQTableFormat.Parquet)
+			throw new IllegalStateException("shouldn't call getParquetMetadata on a " + tableFormat + " table!");
+		return parquetMetadata;
+	}
+}
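
As a rough illustration of the input this accessor consumes, the sketch below loads a hand-written YAML fragment with SnakeYAML exactly the way the accessor does. Only the key names are taken from the code above; the DB version string, paths, and sizes are hypothetical:

import org.yaml.snakeyaml.Yaml;
import java.util.Map;

public class YamlLayoutSketch {
	public static void main(String[] args) {
		// Hypothetical extract output; key names mirror those read by MetadataYAMLAccessor.
		String sample =
				"DBVersion: 'PostgreSQL 8.2.15 (HAWQ 1.x)'\n" +
				"Encoding: UTF8\n" +
				"DFS_URL: hdfs://localhost:8020\n" +
				"TableName: public.t\n" +
				"FileFormat: Parquet\n" +
				"Parquet_FileLocations:\n" +
				"  Files:\n" +
				"  - path: /hawq_data/16385/16387/16513/1\n" +
				"    size: 1024\n";

		Map<?, ?> metadata = (Map<?, ?>) new Yaml().load(sample);
		System.out.println(metadata.get("TableName"));   // public.t
		System.out.println(metadata.get("FileFormat"));  // Parquet
	}
}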

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/schema/HAWQField.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/schema/HAWQField.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/schema/HAWQField.java
new file mode 100644
index 0000000..45b55b1
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/schema/HAWQField.java
@@ -0,0 +1,98 @@
+package com.pivotal.hawq.mapreduce.schema;
+
+/**
+ * Represent a field in HAWQ's schema.
+ */
+public abstract class HAWQField {
+
+	private boolean isOptional;
+	private String name;
+	private boolean isArray;
+
+	/**
+	 * Construct a HAWQField instance
+	 * @param isOptional whether the field is optional
+	 * @param name name of the field
+	 * @param isArray whether the field is an array
+	 */
+	public HAWQField(boolean isOptional, String name, boolean isArray) {
+		this.isOptional = isOptional;
+		this.name = name;
+		this.isArray = isArray;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (obj == null || !(obj instanceof HAWQField))
+			return false;
+		return equalsField((HAWQField) obj);
+	}
+
+	/**
+	 * Test equality of two HAWQField objects.
+	 * @param other the field to compare with
+	 * @return true if the two fields are equal, false otherwise
+	 */
+	protected abstract boolean equalsField(HAWQField other);
+
+	/**
+	 * Indicate whether the field is of a primitive type.
+	 * @return true if the field is of primitive type, false otherwise
+	 */
+	abstract public boolean isPrimitive();
+
+	/**
+	 * Write the field's String representation to buffer sb with the specified indent.
+	 * @param sb buffer to write to
+	 * @param indent indentation to use
+	 */
+	abstract public void writeToStringBuilder(StringBuilder sb, String indent);
+
+	/**
+	 * Cast the object into HAWQPrimitiveField.
+	 * @return this field as a HAWQPrimitiveField
+	 * @throws ClassCastException if the field is not a primitive field
+	 */
+	public HAWQPrimitiveField asPrimitive() throws ClassCastException {
+		if (!isPrimitive()) {
+			throw new ClassCastException(this + " is not a primitive field");
+		}
+		return (HAWQPrimitiveField) this;
+	}
+
+	/**
+	 * Cast the object into HAWQGroupField.
+	 * @return this field as a HAWQGroupField
+	 * @throws ClassCastException if the field is not a group field
+	 */
+	public HAWQGroupField asGroup() throws ClassCastException {
+		if (isPrimitive()) {
+			throw new ClassCastException(this + " is not a group field");
+		}
+		return (HAWQGroupField) this;
+	}
+
+	/**
+	 * Get whether the field is optional.
+	 * @return true if optional, false if not
+	 */
+	public boolean isOptional() {
+		return isOptional;
+	}
+
+	/**
+	 * Get the field's name
+	 * @return field's name
+	 */
+	public String getName() {
+		return name;
+	}
+
+	/**
+	 * Get whether the field is an array.
+	 * @return true if is array, false if not
+	 */
+	public boolean isArray() {
+		return isArray;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/schema/HAWQGroupField.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/schema/HAWQGroupField.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/schema/HAWQGroupField.java
new file mode 100644
index 0000000..a735933
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/schema/HAWQGroupField.java
@@ -0,0 +1,208 @@
+package com.pivotal.hawq.mapreduce.schema;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Represent a group field in HAWQ's schema. A group field contains one or more sub-fields.
+ * Note that within a group, sub-field indexes start at 1 rather than 0.
+ */
+public class HAWQGroupField extends HAWQField {
+
+	private List<HAWQField> fields;
+	private String dataTypeName; /* group maps to UDT in HAWQ, thus this is the name of the UDT */
+
+	private Map<String, Integer> indexByName;
+
+	/**
+	 * Constructor of HAWQGroupField.
+	 *
+	 * <p>Instead of using this constructor, we recommend using the
+	 * factory methods defined in HAWQSchema:</p>
+	 *
+	 * <ul>
+	 *     <li>HAWQSchema.required_group(...)</li>
+	 *     <li>HAWQSchema.optional_group(...)</li>
+	 *     <li>HAWQSchema.required_group_array(...)</li>
+	 *     <li>HAWQSchema.optional_group_array(...)</li>
+	 * </ul>
+	 *
+	 * @param isOptional whether the field is optional or not
+	 * @param isArray whether the field is an array or not
+	 * @param name name of the field
+	 * @param dataTypeName name of the UDT type in HAWQ this group maps to. This is optional.
+	 * @param fields fields of the group field
+	 */
+	public HAWQGroupField(boolean isOptional, boolean isArray, String name,
+							 String dataTypeName, HAWQField... fields) {
+		this(isOptional, isArray, name, dataTypeName, Arrays.asList(fields));
+	}
+
+	/**
+	 * Constructor of HAWQGroupField.
+	 *
+	 * <p>Instead of using this constructor, we recommend using the
+	 * factory methods defined in HAWQSchema:</p>
+	 *
+	 * <ul>
+	 *     <li>HAWQSchema.required_group(...)</li>
+	 *     <li>HAWQSchema.optional_group(...)</li>
+	 *     <li>HAWQSchema.required_group_array(...)</li>
+	 *     <li>HAWQSchema.optional_group_array(...)</li>
+	 * </ul>
+	 *
+	 * @param isOptional whether the field is optional or not
+	 * @param isArray whether the field is an array or not
+	 * @param name name of the field
+	 * @param dataTypeName name of the UDT type in HAWQ this group maps to. This is optional.
+	 * @param fields fields of the group field
+	 */
+	public HAWQGroupField(boolean isOptional, boolean isArray, String name,
+							 String dataTypeName, List<HAWQField> fields) {
+		super(isOptional, name, isArray);
+		// use empty string internally as missing value of data type name
+		if (dataTypeName == null)
+			dataTypeName = "";
+		this.dataTypeName = dataTypeName;
+		this.fields = fields;
+
+		this.indexByName = new HashMap<String, Integer>();
+		for (int i = 0; i < fields.size(); i++) {
+			indexByName.put(fields.get(i).getName(), i);
+		}
+	}
+
+	/**
+	 * Get number of fields this group contains.
+	 * @return number of fields in this group
+	 */
+	public int getFieldCount() {
+		return fields.size();
+	}
+
+	/**
+	 * Get the index of a field by field name.
+	 * NOTE: field index starts with 1 rather than 0.
+	 *
+	 * @param fieldName field's name
+	 * @return index of the field with the given name
+	 */
+	public int getFieldIndex(String fieldName) {
+		/*
+		 * GPSQL-1031
+		 * 
+		 * When the field does not exist in this schema, throw a readable exception for the user
+		 */
+		if (!indexByName.containsKey(fieldName))
+			throw new IllegalArgumentException("Field '" + fieldName + "' not found");
+		return indexByName.get(fieldName) + 1;
+	}
+
+	/**
+	 * Get field by its index in the group.
+	 * NOTE: field index starts with 1 rather than 0.
+	 *
+	 * @param fieldIndex field's index in the group
+	 * @return field having the given index
+	 */
+	public HAWQField getField(int fieldIndex) {
+		return fields.get(fieldIndex - 1);
+	}
+
+	/**
+	 * Get field by field name.
+	 * @param fieldName field's name
+	 * @return field having the given name.
+	 */
+	public HAWQField getField(String fieldName) {
+		return getField(getFieldIndex(fieldName));
+	}
+
+	/**
+	 * Get field's type name in lowercase by field's index in the group. Group field's
+	 * type name is "group".
+	 * NOTE: field index starts with 1 rather than 0.
+	 *
+	 * @param fieldIndex field's index in the group
+	 * @return field's type name in lowercase
+	 */
+	public String getFieldType(int fieldIndex) {
+		HAWQField field = getField(fieldIndex);
+		if (field.isPrimitive())
+			return field.asPrimitive().getType().toString().toLowerCase();
+		return "group";
+	}
+
+	/**
+	 * Get field's type name in lowercase by field's name. Group field's
+	 * type name is "group".
+	 * @param fieldName field's name
+	 * @return field's type name in lowercase
+	 */
+	public String getFieldType(String fieldName) {
+		return getFieldType(getFieldIndex(fieldName));
+	}
+
+	@Override
+	protected boolean equalsField(HAWQField other) {
+		if (other.isPrimitive()) return false;
+		HAWQGroupField g = other.asGroup();
+		return  this.isOptional() == g.isOptional() &&
+				this.isArray() == g.isArray() &&
+				this.getName().equals(g.getName()) &&
+				this.getDataTypeName().equals(g.getDataTypeName()) &&
+				this.getFields().equals(g.getFields());
+	}
+
+	@Override
+	public boolean isPrimitive() {
+		return false;
+	}
+
+	/**
+	 * Get all fields of the group.
+	 * @return fields of the group
+	 */
+	public List<HAWQField> getFields() {
+		return fields;
+	}
+
+	/**
+	 * Get data type name of this group field.
+	 * @return data type name of the field
+	 */
+	public String getDataTypeName() {
+		return dataTypeName;
+	}
+
+	protected void writeMembersToStringBuilder(StringBuilder sb, String indent) {
+		for (int i = 0; i < fields.size(); i++) {
+			if (i != 0) sb.append("\n");
+			fields.get(i).writeToStringBuilder(sb, indent + "  ");
+		}
+	}
+
+	@Override
+	public void writeToStringBuilder(StringBuilder sb, String indent) {
+		sb.append(indent)
+				.append(isOptional() ? "optional " : "required ")
+				.append("group")
+				.append(isArray() ? "[] " : " ")
+				.append(getName())
+				.append("".equals(getDataTypeName()) ? " " : " (" + getDataTypeName() + ") ")
+				.append("{\n");
+
+		writeMembersToStringBuilder(sb, indent);
+
+		sb.append("\n").append(indent).append("}");
+	}
+
+	@Override
+	public String toString() {
+		StringBuilder sb = new StringBuilder();
+		writeToStringBuilder(sb, "");
+		return sb.toString();
+	}
+}
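
A short sketch of the 1-based sub-field indexing described above, built through the HAWQSchema factory methods that appear later in this commit; the group and field names are made up for illustration:

import com.pivotal.hawq.mapreduce.schema.HAWQGroupField;
import com.pivotal.hawq.mapreduce.schema.HAWQPrimitiveField.PrimitiveType;
import com.pivotal.hawq.mapreduce.schema.HAWQSchema;

public class GroupFieldSketch {
	public static void main(String[] args) {
		// Hypothetical group with two sub-fields; names are arbitrary.
		HAWQGroupField point = HAWQSchema.optional_group(
				"location", "my_point_type",
				HAWQSchema.required_field(PrimitiveType.FLOAT8, "x"),
				HAWQSchema.required_field(PrimitiveType.FLOAT8, "y")).asGroup();

		System.out.println(point.getFieldCount());        // 2
		System.out.println(point.getFieldIndex("x"));     // 1 (indexes start at 1, not 0)
		System.out.println(point.getFieldType(2));        // float8
		System.out.println(point.getField(1).getName());  // x
	}
}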

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/schema/HAWQPrimitiveField.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/schema/HAWQPrimitiveField.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/schema/HAWQPrimitiveField.java
new file mode 100644
index 0000000..9a4103c
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/schema/HAWQPrimitiveField.java
@@ -0,0 +1,90 @@
+package com.pivotal.hawq.mapreduce.schema;
+
+/**
+ * Represent a primitive field in HAWQ's schema.
+ * See PrimitiveType for all primitive types.
+ */
+public class HAWQPrimitiveField extends HAWQField {
+
+	/**
+	 * Enumeration of all supported primitive types.
+	 * Note that CHAR and BPCHAR are the same; BPCHAR is included for internal use.
+	 */
+	public static enum PrimitiveType {
+		BOOL, BIT, VARBIT, BYTEA, INT2, INT4, INT8, FLOAT4, FLOAT8, NUMERIC,
+		CHAR, BPCHAR, VARCHAR, TEXT, DATE, TIME, TIMETZ, TIMESTAMP, TIMESTAMPTZ, INTERVAL,
+		POINT, LSEG, BOX, CIRCLE, PATH, POLYGON, MACADDR, INET, CIDR, XML
+	}
+
+	private PrimitiveType type;
+
+	/**
+	 * Constructor for HAWQPrimitiveField.
+	 *
+	 * <p>Instead of using this constructor, we recommend using the
+	 * factory methods defined in HAWQSchema:</p>
+	 *
+	 * <ul>
+	 *     <li>HAWQSchema.required_field(...)</li>
+	 *     <li>HAWQSchema.optional_field(...)</li>
+	 *     <li>HAWQSchema.required_field_array(...)</li>
+	 *     <li>HAWQSchema.optional_field_array(...)</li>
+	 * </ul>
+	 *
+	 * @param isOptional whether the field is optional or not
+	 * @param name name of the field
+	 * @param type type of the field
+	 * @param isArray whether the field is an array
+	 */
+	public HAWQPrimitiveField(boolean isOptional, String name, PrimitiveType type, boolean isArray) {
+		super(isOptional, name, isArray);
+		// use BPCHAR internally
+		if (type == PrimitiveType.CHAR) {
+			type = PrimitiveType.BPCHAR;
+		}
+		this.type = type;
+	}
+
+	@Override
+	protected boolean equalsField(HAWQField other) {
+		if (other.isPrimitive()) {
+			HAWQPrimitiveField p = other.asPrimitive();
+			return  this.isOptional() == p.isOptional() &&
+					this.getType() == p.getType() &&
+					this.isArray() == p.isArray() &&
+					this.getName().equals(p.getName());
+		}
+		return false;
+	}
+
+	@Override
+	public boolean isPrimitive() {
+		return true;
+	}
+
+	/**
+	 * Get field's type.
+	 * @return field's type
+	 */
+	public PrimitiveType getType() {
+		return type;
+	}
+
+	@Override
+	public void writeToStringBuilder(StringBuilder sb, String indent) {
+		sb.append(indent)
+				.append(isOptional() ? "optional " : "required ")
+				.append(getType().toString().toLowerCase())
+				.append(isArray() ? "[] " : " ")
+				.append(getName())
+				.append(";");
+	}
+
+	@Override
+	public String toString() {
+		StringBuilder sb = new StringBuilder("HAWQPrimitiveField { ");
+		this.writeToStringBuilder(sb, "");
+		sb.append(" }");
+		return sb.toString();
+	}
+}
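
One detail worth noting from the constructor above: CHAR is replaced by BPCHAR internally, so a field declared as CHAR reports its type as BPCHAR. A small sketch (the field name is arbitrary):

import com.pivotal.hawq.mapreduce.schema.HAWQPrimitiveField;
import com.pivotal.hawq.mapreduce.schema.HAWQPrimitiveField.PrimitiveType;
import com.pivotal.hawq.mapreduce.schema.HAWQSchema;

public class CharNormalizationSketch {
	public static void main(String[] args) {
		// The constructor above maps CHAR to BPCHAR before storing the type.
		HAWQPrimitiveField f =
				HAWQSchema.required_field(PrimitiveType.CHAR, "code").asPrimitive();
		System.out.println(f.getType());  // BPCHAR
		System.out.println(f);            // HAWQPrimitiveField { required bpchar code; }
	}
}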

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b26974c/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/schema/HAWQSchema.java
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/schema/HAWQSchema.java b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/schema/HAWQSchema.java
new file mode 100644
index 0000000..0e78469
--- /dev/null
+++ b/contrib/hawq-hadoop/hawq-mapreduce-common/src/main/java/com/pivotal/hawq/mapreduce/schema/HAWQSchema.java
@@ -0,0 +1,308 @@
+package com.pivotal.hawq.mapreduce.schema;
+
+import com.pivotal.hawq.mapreduce.schema.HAWQPrimitiveField.PrimitiveType;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.StringTokenizer;
+
+/**
+ * This class represents HAWQ's schema. A HAWQ schema contains one or more fields;
+ * each field can be a primitive field or a group field.
+ */
+public class HAWQSchema extends HAWQGroupField {
+
+	/**
+	 * Constructor for HAWQSchema.
+	 * @param name name of the schema
+	 * @param fields fields of the schema
+	 */
+	public HAWQSchema(String name, HAWQField... fields) {
+		this(name, Arrays.asList(fields));
+	}
+
+	/**
+	 * Constructor for HAWQSchema.
+	 * @param name name of the schema
+	 * @param fields fields of the schema
+	 */
+	public HAWQSchema(String name, List<HAWQField> fields) {
+		super(false, false, name, null, fields);
+	}
+
+	@Override
+	public void writeToStringBuilder(StringBuilder sb, String indent) {
+		sb.append("message ").append(getName()).append(" {\n");
+		writeMembersToStringBuilder(sb, indent);
+		sb.append("\n}");
+	}
+
+	/**
+	 * Build a HAWQSchema from its string representation; this is the inverse of toString.
+	 * @param schemaString string representation of HAWQSchema
+	 * @return schema constructed from string
+	 */
+	public static HAWQSchema fromString(String schemaString) {
+		SchemaParser parser = new SchemaParser(new SchemaLexer(schemaString));
+		return parser.parse();
+	}
+
+	//-------------------------------------------------------------------------
+	// Parser class for HAWQSchema
+	//-------------------------------------------------------------------------
+
+	private static class SchemaLexer {
+		private StringTokenizer tokenizer;
+		private int lineNumber;
+		private StringBuilder currentLine;
+
+		public SchemaLexer(String schemaString) {
+			tokenizer = new StringTokenizer(schemaString, " \t\n:{}[]();", /* returnDelims= */true);
+			lineNumber = 1;
+			currentLine = new StringBuilder();
+		}
+
+		public String nextToken() {
+			while (tokenizer.hasMoreTokens()) {
+				String token = tokenizer.nextToken();
+				if (token.equals("\n")) {
+					lineNumber++;
+					currentLine.setLength(0);
+				} else {
+					currentLine.append(token);
+				}
+				if (token.equals(" ") || token.equals("\t") || token.equals("\n"))
+					continue; // ignore whitespaces
+				return token;
+			}
+			throw new IllegalArgumentException("unexpected end of schema");
+		}
+
+		public boolean hasMoreTokens() {
+			return tokenizer.hasMoreTokens();
+		}
+
+		public String getLocationString() {
+			return "line " + lineNumber + ":" + currentLine;
+		}
+	}
+
+	private static class SchemaParser {
+		private SchemaLexer lexer;
+
+		public SchemaParser(SchemaLexer lexer) {
+			this.lexer = lexer;
+		}
+
+		public HAWQSchema parse() {
+			HAWQSchema schema = readSchema(lexer.nextToken());
+			if (lexer.hasMoreTokens())
+				throw new IllegalArgumentException("extra data on line " + lexer.lineNumber);
+			return schema;
+		}
+
+		/**
+		 * schema := 'message' <schemaName> '{' fields '}'
+		 * fields := (field)*
+		 */
+		private HAWQSchema readSchema(String token) {
+			matchToken(token, "message", "start with 'message'");
+			String schemaName = lexer.nextToken();
+			matchToken(lexer.nextToken(), "{");
+
+			List<HAWQField> fields = new ArrayList<HAWQField>();
+			while (!(token = lexer.nextToken()).equals("}")) {
+				fields.add(readField(token));
+			}
+
+			matchToken(token, "}");
+			return new HAWQSchema(schemaName, fields);
+		}
+
+		/**
+		 * field := ('required' | 'optional') (primitive_field | group_field)
+		 */
+		private HAWQField readField(String token) {
+			if (!token.equals("required") && !token.equals("optional"))
+				throw new IllegalArgumentException(String.format(
+						"missing 'required' or 'optional' keyword for field definition, found '%s' at %s",
+						token, lexer.getLocationString()));
+
+			boolean isOptional = token.equals("optional");
+			token = lexer.nextToken();
+
+			if (token.equals("group"))
+				return readGroupField(isOptional, token);
+			return readPrimitiveField(isOptional, token);
+		}
+
+		/**
+		 * group_field := 'group' ('[]')? <groupName> ('('<dataTypeName>')')? '{' fields '}'
+		 */
+		private HAWQField readGroupField(boolean isOptional, String token) {
+			boolean isArray = false;
+			String groupName = null;
+			String dataTypeName = null;
+
+			matchToken(token, "group");
+			token = lexer.nextToken();
+
+			if (token.equals("[")) {
+				matchToken(lexer.nextToken(), "]");
+				isArray = true;
+				token = lexer.nextToken();
+			}
+			groupName = token;
+
+			token = lexer.nextToken();
+			if (token.equals("(")) {
+				dataTypeName = lexer.nextToken();
+				matchToken(lexer.nextToken(), ")", "datatype name for group");
+				token = lexer.nextToken();
+			}
+			matchToken(token, "{", "start of group");
+
+			List<HAWQField> fields = new ArrayList<HAWQField>();
+			while (!(token = lexer.nextToken()).equals("}")) {
+				fields.add(readField(token));
+			}
+			matchToken(token, "}");
+
+			return new HAWQGroupField(isOptional, isArray, groupName, dataTypeName, fields);
+		}
+
+		/**
+		 * primitive_field := <primitive_field_type> ('[]')? <fieldName> ';'
+		 */
+		private HAWQField readPrimitiveField(boolean isOptional, String token) {
+			PrimitiveType type = null;
+			boolean isArray = false;
+			String fieldName = null;
+
+			try {
+				type = PrimitiveType.valueOf(token.toUpperCase());
+			} catch (IllegalArgumentException e) {
+				throw new IllegalArgumentException(String.format(
+						"unsupported primitive field type '%s' at %s", token, lexer.getLocationString()
+				));
+			}
+
+			token = lexer.nextToken();
+			if (token.equals("[")) {
+				matchToken(lexer.nextToken(), "]");
+				isArray = true;
+				token = lexer.nextToken();
+			}
+			fieldName = token;
+			matchToken(lexer.nextToken(), ";", "primitive field should end with ';'");
+
+			return new HAWQPrimitiveField(isOptional, fieldName, type, isArray);
+		}
+
+		private void matchToken(String token, String expect) {
+			matchToken(token, expect, null);
+		}
+
+		private void matchToken(String token, String expect, String message) {
+			if (!token.equals(expect)) {
+				if (message == null || "".equals(message.trim()))
+					throw new IllegalArgumentException(String.format(
+							"expect '%s', but found '%s' at %s", expect, token, lexer.getLocationString()
+					));
+				else
+					throw new IllegalArgumentException(String.format(
+							"%s: expect '%s', but found '%s' at %s", message, expect, token, lexer.getLocationString()
+					));
+			}
+		}
+	}
+
+	//-------------------------------------------------------------------------
+	// Factory methods to create HAWQField
+	//-------------------------------------------------------------------------
+
+	/**
+	 * Construct a required non-array primitive field.
+	 * @param type type of the field
+	 * @param name name of the field
+	 * @return constructed field object
+	 */
+	public static HAWQField required_field(PrimitiveType type, String name) {
+		return new HAWQPrimitiveField(false, name, type, false);
+	}
+
+	/**
+	 * Construct an optional non-array primitive field.
+	 * @param type type of the field
+	 * @param name name of the field
+	 * @return constructed field object
+	 */
+	public static HAWQField optional_field(PrimitiveType type, String name) {
+		return new HAWQPrimitiveField(true, name, type, false);
+	}
+
+	/**
+	 * Construct a required primitive array field.
+	 * @param type type of the field
+	 * @param name name of the field
+	 * @return constructed field object
+	 */
+	public static HAWQField required_field_array(PrimitiveType type, String name) {
+		return new HAWQPrimitiveField(false, name, type, true);
+	}
+
+	/**
+	 * Construct an optional primitive array field.
+	 * @param type type of the field
+	 * @param name name of the field
+	 * @return constructed field object
+	 */
+	public static HAWQField optional_field_array(PrimitiveType type, String name) {
+		return new HAWQPrimitiveField(true, name, type, true);
+	}
+
+	/**
+	 * Construct a required non-array group field.
+	 * @param name name of the group
+	 * @param dataTypeName data type name of the group
+	 * @param fields fields of the group
+	 * @return constructed group field
+	 */
+	public static HAWQField required_group(String name, String dataTypeName, HAWQField... fields) {
+		return new HAWQGroupField(false, false, name, dataTypeName, fields);
+	}
+
+	/**
+	 * Construct an optional non-array group field.
+	 * @param name name of the group
+	 * @param dataTypeName data type name of the group
+	 * @param fields fields of the group
+	 * @return constructed group field
+	 */
+	public static HAWQField optional_group(String name, String dataTypeName, HAWQField... fields) {
+		return new HAWQGroupField(true, false, name, dataTypeName, fields);
+	}
+
+	/**
+	 * Construct a required group array field.
+	 * @param name name of the group
+	 * @param dataTypeName data type name of the group
+	 * @param fields fields of the group
+	 * @return constructed group field
+	 */
+	public static HAWQField required_group_array(String name, String dataTypeName, HAWQField... fields) {
+		return new HAWQGroupField(false, true, name, dataTypeName, fields);
+	}
+
+	/**
+	 * Construct an optional group array field.
+	 * @param name name of the group
+	 * @param dataTypeName data type name of the group
+	 * @param fields fields of the group
+	 * @return constructed group field
+	 */
+	public static HAWQField optional_group_array(String name, String dataTypeName, HAWQField... fields) {
+		return new HAWQGroupField(true, true, name, dataTypeName, fields);
+	}
+}
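
Tying the schema classes together, the sketch below builds a schema with the factory methods above, prints it in the 'message' syntax that fromString parses, and round-trips it. The table, group, and field names are hypothetical:

import com.pivotal.hawq.mapreduce.schema.HAWQPrimitiveField.PrimitiveType;
import com.pivotal.hawq.mapreduce.schema.HAWQSchema;

public class SchemaRoundTripSketch {
	public static void main(String[] args) {
		// Hypothetical table schema: a primitive field, a primitive array, and a nested group.
		HAWQSchema schema = new HAWQSchema("sample_table",
				HAWQSchema.required_field(PrimitiveType.INT4, "id"),
				HAWQSchema.optional_field_array(PrimitiveType.TEXT, "tags"),
				HAWQSchema.optional_group("addr", "my_addr_type",
						HAWQSchema.required_field(PrimitiveType.VARCHAR, "city"),
						HAWQSchema.optional_field(PrimitiveType.INT4, "zip")));

		String s = schema.toString();
		System.out.println(s);
		// message sample_table {
		//   required int4 id;
		//   optional text[] tags;
		//   optional group addr (my_addr_type) {
		//     required varchar city;
		//     optional int4 zip;
		//   }
		// }

		// fromString parses the same 'message' syntax back into an equal schema.
		HAWQSchema parsed = HAWQSchema.fromString(s);
		System.out.println(parsed.equals(schema));  // true
	}
}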

