hawq-commits mailing list archives

From nh...@apache.org
Subject [6/7] incubator-hawq git commit: HAWQ-185. Remove unused PXF regression tests
Date Tue, 01 Dec 2015 17:28:06 GMT
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7eeeec9d/src/test/regress/helpers/DummyAnalyzer.java
----------------------------------------------------------------------
diff --git a/src/test/regress/helpers/DummyAnalyzer.java b/src/test/regress/helpers/DummyAnalyzer.java
deleted file mode 100644
index dcec45b..0000000
--- a/src/test/regress/helpers/DummyAnalyzer.java
+++ /dev/null
@@ -1,31 +0,0 @@
-import com.pivotal.pxf.api.Analyzer;
-import com.pivotal.pxf.api.AnalyzerStats;
-import com.pivotal.pxf.api.utilities.InputData;
-
-/**
- * Class that defines getting statistics for ANALYZE.
- * {@link #getEstimatedStats} returns statistics for a given path
- * (block size, number of blocks, number of tuples).
- * Used when calling ANALYZE on a PXF external table,
- * to get the table's statistics that are used by the optimizer to plan queries.
- * Dummy implementation, for documentation.
- */
-public class DummyAnalyzer extends Analyzer {
-    public DummyAnalyzer(InputData metaData) {
-        super(metaData);
-    }
-
-    /**
-     * Gets the estimated statistics for a given path in json format.
-     *
-     * @param data the URI that can appear as a file name, a directory name or a wildcard
-     * @return the estimated resource statistics in json format
-     * @throws Exception
-     */
-    @Override
-    public AnalyzerStats getEstimatedStats(String data) throws Exception {
-        return new AnalyzerStats(160000 /* disk block size in bytes */,
-                3 /* number of disk blocks */,
-                6 /* total number of rows */);
-    }
-}
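
For reference, a minimal standalone driver (not part of this commit, and assuming the com.pivotal.pxf.api jars and the DummyAnalyzer class above are on the classpath) that simply prints the hard-coded statistics:

import com.pivotal.pxf.api.AnalyzerStats;

// Hypothetical check, for illustration only.
public class DummyAnalyzerCheck {
    public static void main(String[] args) throws Exception {
        // Passing null is assumed to be safe because DummyAnalyzer never reads its InputData.
        DummyAnalyzer analyzer = new DummyAnalyzer(null);
        AnalyzerStats stats = analyzer.getEstimatedStats("/some/path");
        System.out.println(stats); // prints whatever AnalyzerStats' toString() provides
    }
}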

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7eeeec9d/src/test/regress/helpers/DummyFragmenter.java
----------------------------------------------------------------------
diff --git a/src/test/regress/helpers/DummyFragmenter.java b/src/test/regress/helpers/DummyFragmenter.java
deleted file mode 100644
index e3c9404..0000000
--- a/src/test/regress/helpers/DummyFragmenter.java
+++ /dev/null
@@ -1,38 +0,0 @@
-import com.pivotal.pxf.api.Fragment;
-import com.pivotal.pxf.api.Fragmenter;
-import com.pivotal.pxf.api.utilities.InputData;
-
-import java.util.List;
-
-/*
- * Class that defines the splitting of a data resource into fragments that can
- * be processed in parallel
- * getFragments() returns the fragment information for a given path (source name and location of each fragment).
- * Used to get fragments of data that could be read in parallel from the different segments.
- * Dummy implementation, for documentation
- */
-public class DummyFragmenter extends Fragmenter {
-    public DummyFragmenter(InputData metaData) {
-        super(metaData);
-    }
-
-    /*
-     * path is a data source URI that can appear as a file name, a directory name  or a wildcard
-     * returns the data fragments - identifiers of data and a list of available hosts
-     */
-    @Override
-    public List<Fragment> getFragments() throws Exception {
-        String localhostname = java.net.InetAddress.getLocalHost().getHostName();
-        String[] localHosts = new String[]{localhostname, localhostname};
-        fragments.add(new Fragment(inputData.dataSource() + ".1" /* source name */,
-                localHosts /* available hosts list */,
-                "fragment1".getBytes()));
-        fragments.add(new Fragment(inputData.dataSource() + ".2" /* source name */,
-                localHosts /* available hosts list */,
-                "fragment2".getBytes()));
-        fragments.add(new Fragment(inputData.dataSource() + ".3" /* source name */,
-                localHosts /* available hosts list */,
-                "fragment3".getBytes()));
-        return fragments;
-    }
-}
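
A similar sketch for the fragmenter side (again not part of this commit): it builds the same three-fragment list using only the Fragment constructor shown above, with a literal source name so that no InputData instance is needed:

import com.pivotal.pxf.api.Fragment;

import java.util.ArrayList;
import java.util.List;

// Hypothetical sketch mirroring DummyFragmenter.getFragments().
public class FragmentListSketch {
    public static void main(String[] args) throws Exception {
        String localhostname = java.net.InetAddress.getLocalHost().getHostName();
        String[] localHosts = new String[]{localhostname, localhostname};
        List<Fragment> fragments = new ArrayList<Fragment>();
        for (int i = 1; i <= 3; i++) {
            fragments.add(new Fragment("somepath." + i /* source name */,
                    localHosts /* available hosts list */,
                    ("fragment" + i).getBytes() /* user data */));
        }
        System.out.println(fragments.size() + " fragments on " + localhostname);
    }
}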

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7eeeec9d/src/test/regress/helpers/DummyResolver.java
----------------------------------------------------------------------
diff --git a/src/test/regress/helpers/DummyResolver.java b/src/test/regress/helpers/DummyResolver.java
deleted file mode 100644
index d1c95b5..0000000
--- a/src/test/regress/helpers/DummyResolver.java
+++ /dev/null
@@ -1,49 +0,0 @@
-import com.pivotal.pxf.api.OneField;
-import com.pivotal.pxf.api.OneRow;
-import com.pivotal.pxf.api.ReadResolver;
-import com.pivotal.pxf.api.WriteResolver;
-import com.pivotal.pxf.api.utilities.InputData;
-import com.pivotal.pxf.api.utilities.Plugin;
-
-import java.util.LinkedList;
-import java.util.List;
-
-import static com.pivotal.pxf.api.io.DataType.INTEGER;
-import static com.pivotal.pxf.api.io.DataType.VARCHAR;
-
-
-/*
- * Class that defines the deserialization of one record brought from the external input data.
- * Every implementation of a deserialization method (Writable, Avro, BP, Thrift, ...)
- * must inherit this abstract class
- * Dummy implementation, for documentation
- */
-public class DummyResolver extends Plugin implements ReadResolver, WriteResolver {
-    private int rowNumber;
-
-    public DummyResolver(InputData metaData) {
-        super(metaData);
-        rowNumber = 0;
-    }
-
-    @Override
-    public List<OneField> getFields(OneRow row) throws Exception {
-        /* break up the row into fields */
-        List<OneField> output = new LinkedList<OneField>();
-        String[] fields = ((String) row.getData()).split(",");
-
-        output.add(new OneField(INTEGER.getOID() /* type */, Integer.parseInt(fields[0]) /* value */));
-        output.add(new OneField(VARCHAR.getOID(), fields[1]));
-        output.add(new OneField(INTEGER.getOID(), Integer.parseInt(fields[2])));
-
-        return output;
-    }
-
-    @Override
-    public OneRow setFields(List<OneField> record) throws Exception {
-        /* should read inputStream row by row */
-        return rowNumber > 5
-                ? null
-                : new OneRow(null, "row number " + rowNumber++);
-    }
-}
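
And a matching sketch for the resolver (not part of this commit): it pushes one comma-separated line through getFields(), assuming, as above, that a null InputData is never dereferenced and that the OneField/OneRow constructors behave as used in the removed code:

import com.pivotal.pxf.api.OneField;
import com.pivotal.pxf.api.OneRow;

import java.util.List;

// Hypothetical check: deserialize a single "int,text,int" line.
public class DummyResolverCheck {
    public static void main(String[] args) throws Exception {
        DummyResolver resolver = new DummyResolver(null); // null InputData is an assumption
        List<OneField> fields = resolver.getFields(new OneRow(null, "1,hello,42"));
        System.out.println(fields.size() + " fields: " + fields);
    }
}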

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7eeeec9d/src/test/regress/helpers/HBaseChangeLookupTable.java
----------------------------------------------------------------------
diff --git a/src/test/regress/helpers/HBaseChangeLookupTable.java b/src/test/regress/helpers/HBaseChangeLookupTable.java
deleted file mode 100644
index 9428d11..0000000
--- a/src/test/regress/helpers/HBaseChangeLookupTable.java
+++ /dev/null
@@ -1,175 +0,0 @@
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.util.Bytes;
-
-import org.apache.hadoop.conf.Configuration;
-
-import org.apache.log4j.Logger;
-import org.apache.log4j.Level;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-
-
-enum HbaseLookupCommands 
-{
-	CREATE_TABLE,
-	DROP_TABLE,
-	DISABLE_TABLE,
-	ENABLE_TABLE,
-	REMOVE_CF,
-	ADD_CF,
-	TRUNCATE_TABLE;
-
-	@Override
-	public String toString() {
-		//lower case and replace _ with -
-		String s = super.toString();
-		return s.toLowerCase().replace('_', '-');
-	}
-};
-
-/*
- * Helper class for testing gphbase protocol.
- * The class will change the lookup table according to command.
- */
-class HBaseChangeLookupTable
-{
-	Configuration config;
-	HBaseAdmin admin;
-	
-	final static String lookupTableName = "pxflookup";
-	final static String lookupCfName = "mapping";
-
-	void printStep(String desc)
-	{
-		System.out.println(desc);
-	}
-
-	HBaseChangeLookupTable() throws IOException
-	{
-		config = HBaseConfiguration.create();
-		admin = new HBaseAdmin(config);
-	}
-
-	boolean tableExists() throws IOException
-	{
-		return admin.isTableAvailable(lookupTableName);
-	}
-
-	void disableTable() throws IOException
-	{
-		printStep("disable table");
-		if (!tableExists())
-			throw new IOException("table " + lookupTableName + " does not exist");
-		if (admin.isTableDisabled(lookupTableName))
-			return;
-		admin.disableTable(lookupTableName);
-	}
-	
-	void enableTable() throws IOException
-	{
-		printStep("enable table");
-		if (!tableExists())
-			throw new IOException("table " + lookupTableName + " does not exist");
-		if (admin.isTableEnabled(lookupTableName))
-			return;
-		admin.enableTable(lookupTableName);
-	}
-	
-	void createTable() throws IOException
-	{
-		if (tableExists())
-			throw new IOException("table " + lookupTableName + " already exists");
-		
-		printStep("create table");	
-		HTableDescriptor tableDescription = new HTableDescriptor(TableName.valueOf(lookupTableName));
-		tableDescription.addFamily(new HColumnDescriptor(lookupCfName));
-
-		admin.createTable(tableDescription);
-	}
-	
-	void dropTable() throws IOException
-	{
-		disableTable();
-		printStep("drop table");
-		admin.deleteTable(lookupTableName);
-	}
-
-	void truncateTable() throws IOException
-	{
-		if (!tableExists())
-			throw new IOException("table " + lookupTableName + " does not exist");
-		printStep("truncate table");
-		HTableDescriptor tableDescription = admin.getTableDescriptor(Bytes.toBytes(lookupTableName));
-		dropTable();
-		printStep("create table");
-		admin.createTable(tableDescription);
-	}
-	
-	void addCf(String columnName) throws IOException
-	{
-		HColumnDescriptor column = new HColumnDescriptor(Bytes.toBytes(columnName));
-		disableTable();
-		printStep("add column name " + columnName);
-		admin.addColumn(lookupTableName, column);
-		enableTable();
-	}
-	
-	void removeCf(String columnName) throws IOException
-	{
-		disableTable();
-		printStep("remove column name " + columnName);
-		admin.deleteColumn(lookupTableName, columnName);
-		enableTable();
-	}
-	
-
-	public static void main(String[] args) throws IOException
-	{
-		// Suppress ZK info level traces
-        Logger.getRootLogger().setLevel(Level.ERROR);
-
-        String options = "(options: " +
-        		"\n\t'create-table'" +
-        		"\n\t'drop-table'" +
-        		"\n\t'disable-table'" +
-        		"\n\t'enable-table'" +
-        		"\n\t'remove-cf'" +
-        		"\n\t'add-cf'" +
-        		"\n\t'truncate-table'";
-        
-        if (args.length < 1) 
-        {
-        	throw new IOException("No command to perform!\n" + options);
-        }
-        
-        String command = args[0];              
-        System.out.println("Command: " + Arrays.toString(args));
-        
-        HBaseChangeLookupTable runner = new HBaseChangeLookupTable();
-        
-        if (command.compareTo(HbaseLookupCommands.CREATE_TABLE.toString()) == 0)
-        	runner.createTable();
-        else if (command.compareTo(HbaseLookupCommands.DROP_TABLE.toString()) == 0)
-        	runner.dropTable();
-        else if (command.compareTo(HbaseLookupCommands.DISABLE_TABLE.toString()) == 0)
-        	runner.disableTable();
-        else if (command.compareTo(HbaseLookupCommands.ENABLE_TABLE.toString()) == 0)
-        	runner.enableTable();
-        else if (command.compareTo(HbaseLookupCommands.REMOVE_CF.toString()) == 0)
-        		runner.removeCf(args.length == 2 ? args[1] : lookupCfName);
-        else if (command.compareTo(HbaseLookupCommands.ADD_CF.toString()) == 0)
-        	runner.addCf(args.length == 2 ? args[1] : lookupCfName);
-        else if (command.compareTo(HbaseLookupCommands.TRUNCATE_TABLE.toString()) == 0)
-        	runner.truncateTable();
-        else 
-        	throw new IOException("Unknown command " + command + ".\n" + options);
-   			
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7eeeec9d/src/test/regress/helpers/HBaseCreateTables.java
----------------------------------------------------------------------
diff --git a/src/test/regress/helpers/HBaseCreateTables.java b/src/test/regress/helpers/HBaseCreateTables.java
deleted file mode 100644
index 769fe18..0000000
--- a/src/test/regress/helpers/HBaseCreateTables.java
+++ /dev/null
@@ -1,255 +0,0 @@
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.util.Bytes;
-
-import org.apache.hadoop.conf.Configuration;
-
-import org.apache.log4j.Logger;
-import org.apache.log4j.Level;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.TimeZone;
-import java.math.BigInteger;
-
-/*
- * HBaseCreateTable creates a table named dataTableName with numberOfSplits splits in HBase.
- * Meaning it will have numberOfSplits + 1 regions.
- * Then, each split is loaded with rowsPerSplit rows using the prefix rowKeyPrefix for row key
- * and valuePrefix for value.
- */
-class HBaseCreateTable
-{
-	Configuration config;
-	HBaseAdmin admin;
-
-	String dataTableName;
-	final int numberOfSplits = 2; // use 3 regions.
-	final String columnFamilyName = "cf1";
-	final String qualifierPrefix = "ql";
-	int rowsPerSplit;
-	boolean useNull;
-	boolean upperCaseLookup;
-	String rowKeyPrefix = "row";
-	final String valuePrefix = "value";
-	final String lookupTableName = "pxflookup";
-	final String lookupTableMappingColumnFamilyName = "mapping";
-
-	void go() throws Exception
-	{
-		if (tableExists(dataTableName))
-			throw new Exception("table " + dataTableName + " already exists");
-
-		printStep("create table");
-		createTable();
-
-		printStep("populate table");
-		ArrayList<Put> rows = generateRows();
-		populateTable(rows);
-
-		printStep("update lookup table");
-		updateLookup();
-
-		printStep("leave");
-	}
-
-	HBaseCreateTable(String tableName, int rowsPerSplit, boolean useNull, boolean upperCaseLookup) throws Exception
-	{
-		dataTableName = tableName;
-		this.rowsPerSplit = rowsPerSplit;
-		this.useNull = useNull;
-		this.upperCaseLookup = upperCaseLookup;
-		
-		config = HBaseConfiguration.create();
-		admin = new HBaseAdmin(config);
-	}
-
-	void setRowKeyPrefix(String rowKeyPrefix)
-	{
-		this.rowKeyPrefix = rowKeyPrefix;
-	}
-	
-	boolean tableExists(String tableName) throws IOException
-	{
-		return admin.isTableAvailable(tableName);
-	}
-
-	void createTable() throws IOException
-	{
-		String[] splits = generateSplits();
-
-		HTableDescriptor tableDescription = new HTableDescriptor(TableName.valueOf(dataTableName));
-		tableDescription.addFamily(new HColumnDescriptor(columnFamilyName));
-
-		admin.createTable(tableDescription,
-						  Bytes.toByteArrays(splits));
-	}
-
-	String[] generateSplits()
-	{
-		String[] splits = new String[numberOfSplits];
-
-		for (int i = 0; i < numberOfSplits; ++i)
-			splits[i] = String.format("%s%08d", rowKeyPrefix, (i + 1) * rowsPerSplit);
-
-		return splits;
-	}
-
-	ArrayList<Put> generateRows() throws java.io.UnsupportedEncodingException
-	{
-		byte[] columnFamily = Bytes.toBytes(columnFamilyName);
-		ArrayList<Put> rows = new ArrayList<Put>();
-		final BigInteger ten = BigInteger.valueOf(10);
-
-		for (int splitIndex = 0; splitIndex < (numberOfSplits + 1); ++splitIndex)
-		{
-			for (int i = 0; i < rowsPerSplit; ++i)
-			{
-				//Row Key
-				String rowKey = String.format("%s%08d", rowKeyPrefix, i + splitIndex * rowsPerSplit);
-				Put newRow = new Put(Bytes.toBytes(rowKey));
-
-				//Qualifier 1. regular ascii string
-				if ((!useNull) || (i%2==0))
-					addValue(newRow, columnFamily, "q1", String.format("ASCII%08d", i));
-
-				//Qualifier 2. multibyte utf8 string.
-                addValue(newRow, columnFamily, "q2", String.format("UTF8_計算機用語_%08d", i).getBytes());
-
-				//Qualifier 3. integer value.
-                if ((!useNull) || (i%3==0))
-                	addValue(newRow, columnFamily, "q3", String.format("%08d", 1 + i + splitIndex * rowsPerSplit));
-
-				//Qualifier 4. regular ascii (for a lookup table redirection)
-                addValue(newRow, columnFamily, "q4", String.format("lookup%08d", i * 2));
-
-                //Qualifier 5. real (float)
-                addValue(newRow, columnFamily, "q5", String.format("%d.%d", i, i));
-
-                //Qualifier 6. float (double)
-                addValue(newRow, columnFamily, "q6", String.format("%d%d%d%d.%d", i, i, i, i, i));
-
-                //Qualifier 7. bpchar (char)
-                addValue(newRow, columnFamily, "q7", String.format("%c", (i + 32) % Character.MAX_VALUE));
-
-                //Qualifier 8. smallint (short)
-                addValue(newRow, columnFamily, "q8", String.format("%d", i));
-
-                //Qualifier 9. bigint (long)
-                Long value9 = ((i * i * i * 10000000000L + i) % Long.MAX_VALUE) * (long)Math.pow(-1, i % 2);
-                addValue(newRow, columnFamily, "q9", value9.toString());
-
-				//Qualifier 10. boolean
-				addValue(newRow, columnFamily, "q10", Boolean.toString((i % 2) == 0));
-
-				//Qualifier 11. numeric (string)
-				addValue(newRow, columnFamily, "q11", (ten.pow(i)).toString());
-
-				//Qualifier 12. Timestamp
-				//Removing system timezone so tests will pass anywhere in the world :)
-				int timeZoneOffset = TimeZone.getDefault().getRawOffset();
-				addValue(newRow, columnFamily, "q12", (new Timestamp(6000 * i - timeZoneOffset)).toString());
-
-				rows.add(newRow);
-			}
-		}
-
-		return rows;
-	}
-
-    void addValue(Put row, byte[] cf, String ql, byte[] value)
-    {
-        row.add(cf, ql.getBytes(), value);
-    }
-
-    void addValue(Put row, byte[] cf, String ql, String value) throws java.io.UnsupportedEncodingException
-    {
-        addValue(row, cf, ql, value.getBytes("UTF-8"));
-    }
-
-	void populateTable(ArrayList<Put> rows) throws IOException
-	{
-		HTable table = new HTable(config, dataTableName);
-		table.put(rows);
-		table.close();
-	}
-
-	void printStep(String desc)
-	{
-		System.out.println(desc);
-	}
-
-	void updateLookup() throws Exception
-	{
-		if (!tableExists(lookupTableName))
-			createLookupTable();
-
-		HTable lookup = new HTable(config, lookupTableName);
-		lookup.put(newMapping());
-		lookup.close();
-	}
-
-	void createLookupTable() throws IOException
-	{
-		HTableDescriptor tableDescription = new HTableDescriptor(TableName.valueOf(lookupTableName));
-		tableDescription.addFamily(new HColumnDescriptor(lookupTableMappingColumnFamilyName));
-
-		admin.createTable(tableDescription);
-	}
-
-	Put newMapping() throws IOException
-	{
-		String key;
-		if (upperCaseLookup)
-			key = "Q4";
-		else
-			key = "q4";
-		
-		Put mapping = new Put(Bytes.toBytes(dataTableName));
-		mapping.add(Bytes.toBytes(lookupTableMappingColumnFamilyName),
-					 Bytes.toBytes(key),
-					 Bytes.toBytes(columnFamilyName + ":q4"));
-
-		return mapping;
-	}
-}
-
-class HBaseCreateTables 
-{
-	
-	public static void main(String[] args) throws Exception
-	{
-		// Suppress ZK info level traces
-        Logger.getRootLogger().setLevel(Level.ERROR);
-        
-        System.out.println("Create table 'gphbase_test', with 100 rows per split");
-        HBaseCreateTable createTable = 
-				new HBaseCreateTable("gphbase_test", 100, false, false);
-		createTable.go();
-		
-		System.out.println("Create table 'gphbase_test_null', with 5 rows per split and null values");
-        createTable = 
-				new HBaseCreateTable("gphbase_test_null", 5, true, false);
-		createTable.go();
-		
-		System.out.println("Create table 'gphbase_test_upper', with 2 rows per split and key in upper case in lookup table");
-        createTable = 
-				new HBaseCreateTable("gphbase_test_upper", 2, true, true);
-		createTable.go();
-		
-		System.out.println("Create table 'gphbase_test_integer_rowkey', with integer rowkey and 50 rows per split");
-        createTable = 
-				new HBaseCreateTable("gphbase_test_integer_rowkey", 50, false, false);
-        createTable.setRowKeyPrefix("");
-		createTable.go();
-       
-	}
-}
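
For scale, a worked example of the split layout (derived from the constants above, not from the commit itself): with rowsPerSplit = 100 and numberOfSplits = 2, generateSplits() yields split keys row00000100 and row00000200, so the table is pre-split into 3 regions, and generateRows() writes (numberOfSplits + 1) * rowsPerSplit = 300 rows keyed row00000000 through row00000299. The same arithmetic gives 15, 6 and 150 rows for the other three tables created in main().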

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7eeeec9d/src/test/regress/helpers/HBaseDropTables.java
----------------------------------------------------------------------
diff --git a/src/test/regress/helpers/HBaseDropTables.java b/src/test/regress/helpers/HBaseDropTables.java
deleted file mode 100644
index 3bc52c5..0000000
--- a/src/test/regress/helpers/HBaseDropTables.java
+++ /dev/null
@@ -1,100 +0,0 @@
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.util.Bytes;
-
-import org.apache.hadoop.conf.Configuration;
-
-import org.apache.log4j.Logger;
-import org.apache.log4j.Level;
-
-import java.io.IOException;
-import java.util.ArrayList;
-
-/*
- * Helper class for testing gphbase protocol.
- * The class will drop the table tableName from HBase.
- */
-class HBaseDropTable
-{
-	Configuration config;
-	HBaseAdmin admin;
-	String tableName;
-	
-	final String lookupTableName = "pxflookup";
-
-	void go() throws Exception
-	{
-		if (!tableExists(tableName))
-			throw new Exception("table " + tableName + " does not exist");
-
-		printStep("drop");
-		dropTable();
-
-		printStep("lookup cleanup");
-		cleanupLookup();
-
-		printStep("leave");
-	}
-
-	void printStep(String desc)
-	{
-		System.out.println(desc);
-	}
-
-	HBaseDropTable(String tableName) throws Exception
-	{
-		this.tableName = tableName;
-		config = HBaseConfiguration.create();
-		admin = new HBaseAdmin(config);
-	}
-
-	boolean tableExists(String tableName) throws IOException
-	{
-		return admin.isTableAvailable(tableName);
-	}
-
-	void dropTable() throws IOException
-	{
-		admin.disableTable(tableName);
-		admin.deleteTable(tableName);
-	}
-
-	void cleanupLookup() throws Exception
-	{
-		if (!tableExists(lookupTableName))
-			throw new Exception("Lookup table does not exist!");
-
-		HTable lookup = new HTable(config, lookupTableName);
-		Delete mapping = new Delete(Bytes.toBytes(tableName));
-		lookup.delete(mapping);
-		lookup.close();
-	}
-}
-
-class HBaseDropTables
-{
-	public static void main(String[] args) throws Exception
-	{
-		// Suppress ZK info level traces
-        Logger.getRootLogger().setLevel(Level.ERROR);
-
-        System.out.println("table name to drop is 'gphbase_test'");
-		HBaseDropTable dropTable = new HBaseDropTable("gphbase_test");
-		dropTable.go();
-		
-		System.out.println("table name to drop is 'gphbase_test_null'");
-		dropTable = new HBaseDropTable("gphbase_test_null");
-		dropTable.go();
-		
-		System.out.println("table name to drop is 'gphbase_test_upper'");
-		dropTable = new HBaseDropTable("gphbase_test_upper");
-		dropTable.go();
-		
-		System.out.println("table name to drop is 'gphbase_test_integer_rowkey'");
-		dropTable = new HBaseDropTable("gphbase_test_integer_rowkey");
-		dropTable.go();
-        
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7eeeec9d/src/test/regress/helpers/Makefile
----------------------------------------------------------------------
diff --git a/src/test/regress/helpers/Makefile b/src/test/regress/helpers/Makefile
deleted file mode 100644
index 8f36039..0000000
--- a/src/test/regress/helpers/Makefile
+++ /dev/null
@@ -1,33 +0,0 @@
-PXF_ROOT=$(GPHD_ROOT)/pxf
-HADOOP_ROOT=$(GPHD_ROOT)/hadoop
-HADOOPCOMMON_ROOT=$(HADOOP_ROOT)/share/hadoop/common
-HADOOPCOMMONLIB_ROOT=$(HADOOP_ROOT)/share/hadoop/common/lib
-HADOOPMAPREDUCE_ROOT=$(HADOOP_ROOT)/share/hadoop/mapreduce
-
-CLASSPATH_list=\
-			   $(PXF_ROOT)/*\
-			   $(HADOOPCOMMON_ROOT)/*\
-			   $(HADOOPCOMMONLIB_ROOT)/*\
-			   $(HADOOPMAPREDUCE_ROOT)/*\
-
-
-CLASSPATH=$(subst : ,:,$(strip $(foreach j, $(CLASSPATH_list), $j:)))
-
-%.class: %.java
-	javac -cp $(CLASSPATH) $^
-
-CustomAvroSequence: CustomAvroSequence.class CustomAvroRecInSequence.class
-	java -cp $(CLASSPATH) $@ . avro_inside_sequence.tbl 1 ../data/pxf/regressPXFCustomAvro.avsc
-
-CustomAvroFile: CustomAvroFile.class CustomAvroRecInFile.class
-	java -cp $(CLASSPATH) $@ . avroformat_inside_avrofile.avro 1 ../data/pxf/regressPXFCustomAvro.avsc
-
-CustomWritableSequence: CustomWritable.class CustomWritableSequence.class
-	java -cp $(CLASSPATH) $@ . writable_inside_sequece.tbl 1
-
-clean:
-	rm *.class
-	rm *~
-	rm avro_inside_sequence.tbl
-	rm avroformat_inside_avrofile.avro
-	rm writable_inside_sequece.tbl

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7eeeec9d/src/test/regress/helpers/README.txt
----------------------------------------------------------------------
diff --git a/src/test/regress/helpers/README.txt b/src/test/regress/helpers/README.txt
deleted file mode 100644
index 38e23be..0000000
--- a/src/test/regress/helpers/README.txt
+++ /dev/null
@@ -1,42 +0,0 @@
-Avro Producers
---------------
-
-AvroFile producers
-CustomAvroFile.java
-CustomAvroRecInFile.java
-
-Avro inside SequenceFile
-CustomAvroSequence.java
-CustomAvroRecInSequence.java
-
-These will use ../data/pxf/regressPXFCustomAvro.avsc as schema
-(parameter on Makefile)
-
-To create an AvroFile
-make CustomAvroFile
-
-To create an Avro inside SequenceFile
-make CustomAvroSequence
-
-Output will be written to avro_inside_sequence.tbl
-and avroformat_inside_avrofile.avro
-To use them, copy them over to ../data/pxf
-
-Make sure your environment contains the following:
-GPHD_ROOT where you have PXF and Hadoop
-
-CustomWritable Producer
------------------------
-
-CustomWritable.java
-CustomWritableSequence.java
-
-To create the SequenceFile
-make CustomWritableSequence
-
-Output will be written to writable_inside_sequece.tbl
-To use it, copy CustomWritable.class and 
-writable_inside_sequece.tbl to ../data/pxf
-
-Make sure your environment contains the following:
-GPHD_ROOT where you have PXF and Hadoop

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7eeeec9d/src/test/regress/helpers/create_table_file.sh
----------------------------------------------------------------------
diff --git a/src/test/regress/helpers/create_table_file.sh b/src/test/regress/helpers/create_table_file.sh
deleted file mode 100644
index 366b16e..0000000
--- a/src/test/regress/helpers/create_table_file.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-
-if [ $# -ne 1 ] ; then
-  echo "usage: sh create_table_file.sh <file path>"
-  exit 1
-fi
-
-echo "creating file $1"
-file=$1
-tmp_file=${1}_tmp
-
-for i in {1..1000}; do echo "t$i,$i"; done > $file
-for i in {1..15}; do cat $file $file > $tmp_file && mv $tmp_file $file; done
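
For scale (a derived note, not part of the commit): the first loop writes 1,000 lines and each of the 15 concatenation passes doubles the file, so the generated table file ends up with 1000 * 2^15 = 32,768,000 lines.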

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7eeeec9d/src/test/regress/input/pxf_hbase.source
----------------------------------------------------------------------
diff --git a/src/test/regress/input/pxf_hbase.source b/src/test/regress/input/pxf_hbase.source
deleted file mode 100644
index 509fd0b..0000000
--- a/src/test/regress/input/pxf_hbase.source
+++ /dev/null
@@ -1,202 +0,0 @@
---
--- PXF HBASE regression suite 
---
--- Prerequisites:
---
---   Must have a running hdfs with REST service on port 50070
---   Must have a running ZooKeeper service
---   Must have a running HBase service
---   Must have HADOOP_ROOT, HBASE_ROOT, HIVE_ROOT and ZOOKEEPER_ROOT set.
---
--- TODO: test gpdbwritable write/read when it is enabled.
--- TODO: test PB, AVRO, THRIFT when it is enabled (read only, with pre formatted files).
--- TODO: test protocol validator for pxf once written.
--- TODO: test parameter passing, filter passing
-
--- start_matchsubs
---                                                                                               
--- # create a match/subs expression to handle file name and line number in error messages
---
--- m/\w+\.c:\d+/
--- s/\w+\.c:\d+/SOME_FILE:SOME_LINE/
---
--- # create a match/subs expression to handle ip addresses that change
---
--- m/(ERROR|WARNING):.*remote component error.*\(\d+\).*from.*'\d+\.\d+\.\d+\.\d+:\d+'.*/
--- s/'\d+\.\d+\.\d+\.\d+:\d+'/'SOME_IP:SOME_PORT'/
---
--- end_matchsubs
-
---------------------------------------------------------------------------------
--- HBASE
---------------------------------------------------------------------------------
---
--- syntax validations
---
-CREATE READABLE EXTERNAL TABLE gphbase_in(a int, b text, c bytea)
-LOCATION ('pxf://@hostname@:50070/hbasetable')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import'); -- negative
-
-CREATE WRITABLE EXTERNAL TABLE gphbase_out(a int, b text, c bytea)
-LOCATION ('pxf://@hostname@:50070/hbasetable?PROFILE=HBase')
-FORMAT 'CUSTOM' (formatter='gpdbwritable_export'); -- positive
-
-DROP EXTERNAL TABLE gphbase_out;
-
--- Setup
-\! javac -cp `${HADOOP_ROOT}/bin/hadoop classpath`:`echo ${HBASE_ROOT}/lib/log4j-*.jar` @abs_srcdir@/helpers/HBaseCreateTables.java @abs_srcdir@/helpers/HBaseDropTables.java @abs_srcdir@/helpers/HBaseChangeLookupTable.java
-\! java -cp  `${HADOOP_ROOT}/bin/hadoop classpath`:`echo ${HBASE_ROOT}/lib/log4j-*.jar`:`echo ${HADOOP_ROOT}/lib/commons-logging-[1-9]*.jar`:`echo ${HADOOP_ROOT}/lib/commons-configuration-[1-9]*.jar`:`echo ${HADOOP_ROOT}/lib/commons-lang-[1-9]*.jar`:@abs_srcdir@/helpers HBaseCreateTables
-
-SET pxf_enable_filter_pushdown = on;
--- Test
--- "cf1:q7" changed from CHAR to BYTEA due to sort order difference between RHEL and OSX
-CREATE EXTERNAL TABLE gphbase_select(recordkey TEXT, 
-                                     "cf1:q1" VARCHAR, 
-                                     "cf1:q2" TEXT, 
-                                     "cf1:q3" INT, 
-                                     q4 BYTEA, 
-                                     "cf1:q5" REAL, 
-                                     "cf1:q6" FLOAT, 
-                                     "cf1:q7" BYTEA, 
-                                     "cf1:q8" SMALLINT, 
-                                     "cf1:q9" BIGINT,
-									 "cf1:q10" BOOLEAN,
-									 "cf1:q11" NUMERIC,
-									 "cf1:q12" TIMESTAMP) 
-LOCATION ('pxf://@hostname@:50070/gphbase_test?PROFILE=HBase')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import');
-
--- Test unsupported data type
-CREATE EXTERNAL TABLE gphbase_unsupported_type(recordkey TEXT, 
-                                     "cf1:q1" VARCHAR, 
-                                     "cf1:q2" TEXT, 
-                                     "cf1:q3" INT, 
-                                     q4 BYTEA, 
-                                     "cf1:q5" REAL, 
-                                     "cf1:q6" FLOAT, 
-                                     "cf1:q7" BYTEA, 
-                                     "cf1:q8" POINT, -- the unsupported type 
-                                     "cf1:q9" BIGINT,
-									 "cf1:q10" BOOLEAN,
-									 "cf1:q11" NUMERIC,
-									 "cf1:q12" TIMESTAMP) 
-LOCATION ('pxf://@hostname@:50070/gphbase_test?PROFILE=HBase')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import');
-SELECT * FROM gphbase_unsupported_type;
-
--- Test analyze for HBase table.
-ANALYZE gphbase_select;
-select relpages, reltuples from pg_class where relname = 'gphbase_select';
-
-SELECT * FROM gphbase_select ORDER BY recordkey ASC;
-SELECT cnt < 300 AS check FROM (SELECT COUNT(*) AS cnt FROM gphbase_select WHERE gp_segment_id = 0) AS a;
-SELECT * FROM gphbase_select WHERE recordkey > 'row00000090' AND recordkey <= 'row00000103' ORDER BY recordkey ASC;
-SELECT * FROM gphbase_select WHERE recordkey = 'row00000100';
-SELECT * FROM gphbase_select WHERE recordkey != 'row00000090' AND recordkey <= 'row00000103' ORDER BY recordkey ASC;
-SELECT * FROM gphbase_select WHERE recordkey != 'row00000090' AND recordkey <= 'row00000095' AND "cf1:q7" > 'o' ORDER BY recordkey ASC;
-SELECT * FROM gphbase_select WHERE "cf1:q1" > 'ASCII00000090' AND q4 <= 'lookup00000198' ORDER BY recordkey ASC;
-SELECT * FROM gphbase_select WHERE "cf1:q2" > 'UTF8_計算機用語_00000090' AND "cf1:q3" <= 990000 ORDER BY recordkey ASC;
-SELECT * FROM gphbase_select WHERE "cf1:q5" > 91.92 AND "cf1:q6" <= 99999999.99 ORDER BY recordkey ASC;
-SELECT * FROM gphbase_select WHERE "cf1:q8" > 97 AND "cf1:q9" <= 9702990000000099 ORDER BY recordkey ASC;
-SELECT * FROM gphbase_select WHeRE "cf1:q9" < -7000000000000000 ORDER BY recordkey ASC;
-SELECT * FROM gphbase_select WHERE recordkey > 'row00000090' AND recordkey <= 'row00000103' OR recordkey = 'row00000105' ORDER BY recordkey ASC;
-SELECT * FROM gphbase_select WHERE recordkey != 'row00000099' AND "cf1:q8" > 97 AND "cf1:q9" <= 9702990000000099 ORDER BY recordkey ASC;
-SELECT * FROM gphbase_select WHERE "cf1:q9" <= 9702990000000099 AND recordkey != 'row00000099' AND "cf1:q8" > 97 ORDER BY recordkey ASC;
-SET pxf_enable_filter_pushdown = off;
-SELECT * FROM gphbase_select WHERE "cf1:q9" <= 9702990000000099 AND recordkey != 'row00000099' AND "cf1:q8" > 97 ORDER BY recordkey ASC;
-SET pxf_enable_filter_pushdown = on;
-DROP EXTERNAL TABLE gphbase_select;
--- Test null values
-CREATE EXTERNAL TABLE gphbase_null(recordkey TEXT, 
-                                     "cf1:q1" VARCHAR, 
-                                     "cf1:q2" TEXT, 
-                                     "cf1:q3" INT, 
-                                     q4 BYTEA, 
-                                     "cf1:q5" REAL, 
-                                     "cf1:q6" FLOAT, 
-                                     "cf1:q7" CHAR, 
-                                     "cf1:q8" SMALLINT, 
-                                     "cf1:q9" BIGINT,
-									 "cf1:q10" BOOLEAN,
-									 "cf1:q11" NUMERIC,
-									 "cf1:q12" TIMESTAMP) 
-LOCATION ('pxf://@hostname@:50070/gphbase_test_null?PROFILE=HBase')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import');
-SELECT * FROM gphbase_null WHERE "cf1:q1" is null ORDER BY recordkey ASC;
-SELECT * FROM gphbase_null WHERE "cf1:q3" is null ORDER BY recordkey ASC;
-DROP EXTERNAL TABLE gphbase_null;
--- Test upper case key in lookup table
-CREATE EXTERNAL TABLE gphbase_upper(recordkey TEXT, 
-                                     "cf1:q1" VARCHAR, 
-                                     "cf1:q2" TEXT, 
-                                     "cf1:q3" INT, 
-                                     q4 BYTEA, 
-                                     "cf1:q5" REAL, 
-                                     "cf1:q6" FLOAT, 
-                                     "cf1:q7" CHAR, 
-                                     "cf1:q8" SMALLINT, 
-                                     "cf1:q9" BIGINT,
-									 "cf1:q10" BOOLEAN,
-									 "cf1:q11" NUMERIC,
-									 "cf1:q12" TIMESTAMP) 
-LOCATION ('pxf://@hostname@:50070/gphbase_test_upper?PROFILE=HBase')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import');
-SELECT * FROM gphbase_upper ORDER BY recordkey ASC;
-DROP EXTERNAL TABLE gphbase_upper;
--- Negative test
-CREATE EXTERNAL TABLE gphbase_error("cf1:q1" INT, -- wrong column type
-                                    recordkey TEXT)
-LOCATION ('pxf://@hostname@:50070/gphbase_test?PROFILE=HBase')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import');
-SELECT * FROM gphbase_error ORDER BY recordkey ASC;
-DROP EXTERNAL TABLE gphbase_error;
--- query without lookup table
-CREATE EXTERNAL TABLE gphbase_lookup(recordkey TEXT, 
-                                     "cf1:q1" VARCHAR, 
-                                     "cf1:q2" TEXT, 
-                                     "cf1:q3" INT, 
-                                     "cf1:q4" BYTEA, 
-                                     "cf1:q5" REAL, 
-                                     "cf1:q6" FLOAT, 
-                                     "cf1:q7" CHAR, 
-                                     "cf1:q8" SMALLINT, 
-                                     "cf1:q9" BIGINT,
-									 "cf1:q10" BOOLEAN,
-									 "cf1:q11" NUMERIC,
-									 "cf1:q12" TIMESTAMP) 
-LOCATION ('pxf://@hostname@:50070/gphbase_test?PROFILE=HBase')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import');
--- truncate lookup table
-\! java -cp `${HADOOP_ROOT}/bin/hadoop classpath`:`echo ${HBASE_ROOT}/lib/log4j-*.jar`:`echo ${HADOOP_ROOT}/lib/commons-logging-[1-9]*.jar`:`echo ${HADOOP_ROOT}/lib/commons-configuration-[1-9]*.jar`:`echo ${HADOOP_ROOT}/lib/commons-lang-[1-9]*.jar`:@abs_srcdir@/helpers HBaseChangeLookupTable truncate-table
-SELECT recordkey, "cf1:q1" FROM gphbase_lookup ORDER BY recordkey LIMIT 5;
--- disable lookup table
-\! java -cp `${HADOOP_ROOT}/bin/hadoop classpath`:`echo ${HBASE_ROOT}/lib/log4j-*.jar`:`echo ${HADOOP_ROOT}/lib/commons-logging-[1-9]*.jar`:`echo ${HADOOP_ROOT}/lib/commons-configuration-[1-9]*.jar`:`echo ${HADOOP_ROOT}/lib/commons-lang-[1-9]*.jar`:@abs_srcdir@/helpers HBaseChangeLookupTable disable-table
-SELECT recordkey, "cf1:q1" FROM gphbase_lookup ORDER BY recordkey LIMIT 5;
--- remove mapping cf from lookup table
-\! java -cp `${HADOOP_ROOT}/bin/hadoop classpath`:`echo ${HBASE_ROOT}/lib/log4j-*.jar`:`echo ${HADOOP_ROOT}/lib/commons-logging-[1-9]*.jar`:`echo ${HADOOP_ROOT}/lib/commons-configuration-[1-9]*.jar`:`echo ${HADOOP_ROOT}/lib/commons-lang-[1-9]*.jar`:@abs_srcdir@/helpers HBaseChangeLookupTable remove-cf
-SELECT recordkey, "cf1:q1" FROM gphbase_lookup ORDER BY recordkey LIMIT 5;
--- drop lookup table
-\! java -cp `${HADOOP_ROOT}/bin/hadoop classpath`:`echo ${HBASE_ROOT}/lib/log4j-*.jar`:`echo ${HADOOP_ROOT}/lib/commons-logging-[1-9]*.jar`:`echo ${HADOOP_ROOT}/lib/commons-configuration-[1-9]*.jar`:`echo ${HADOOP_ROOT}/lib/commons-lang-[1-9]*.jar`:@abs_srcdir@/helpers HBaseChangeLookupTable drop-table
-SELECT recordkey, "cf1:q1" FROM gphbase_lookup ORDER BY recordkey LIMIT 5;
--- create lookup table
-\! java -cp `${HADOOP_ROOT}/bin/hadoop classpath`:`echo ${HBASE_ROOT}/lib/log4j-*.jar`:`echo ${HADOOP_ROOT}/lib/commons-logging-[1-9]*.jar`:`echo ${HADOOP_ROOT}/lib/commons-configuration-[1-9]*.jar`:`echo ${HADOOP_ROOT}/lib/commons-lang-[1-9]*.jar`:@abs_srcdir@/helpers HBaseChangeLookupTable create-table
-DROP EXTERNAL TABLE gphbase_lookup;
--- query a table with rowkey as integer (HD-2610)
-CREATE EXTERNAL TABLE gphbase_integer_rowkey(recordkey INTEGER, 
-                                     "cf1:q1" VARCHAR, 
-                                     "cf1:q2" TEXT, 
-                                     "cf1:q3" INT, 
-                                     "cf1:q4" BYTEA, 
-                                     "cf1:q5" REAL, 
-                                     "cf1:q6" FLOAT, 
-                                     "cf1:q7" CHAR, 
-                                     "cf1:q8" SMALLINT, 
-                                     "cf1:q9" BIGINT) 
-LOCATION ('pxf://@hostname@:50070/gphbase_test_integer_rowkey?PROFILE=HBase')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import');
-SET pxf_enable_filter_pushdown = on;
-SELECT * FROM gphbase_integer_rowkey WHERE recordkey = 50;
-SELECT * FROM gphbase_integer_rowkey WHERE recordkey <= 30 OR recordkey > 145 ORDER BY recordkey;
-DROP EXTERNAL TABLE gphbase_integer_rowkey;
--- Cleanup
-\! java -cp `${HADOOP_ROOT}/bin/hadoop classpath`:`echo ${HBASE_ROOT}/lib/log4j-*.jar`:`echo ${HADOOP_ROOT}/lib/commons-logging-[1-9]*.jar`:`echo ${HADOOP_ROOT}/lib/commons-configuration-[1-9]*.jar`:`echo ${HADOOP_ROOT}/lib/commons-lang-[1-9]*.jar`:@abs_srcdir@/helpers HBaseDropTables

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7eeeec9d/src/test/regress/input/pxf_hdfs.source
----------------------------------------------------------------------
diff --git a/src/test/regress/input/pxf_hdfs.source b/src/test/regress/input/pxf_hdfs.source
deleted file mode 100644
index d0176ad..0000000
--- a/src/test/regress/input/pxf_hdfs.source
+++ /dev/null
@@ -1,560 +0,0 @@
---
--- PXF HDFS regression suite 
---
--- Prerequisites:
---
---   Must have a running hdfs with REST service on port 50070
---   Must have HADOOP_ROOT, HBASE_ROOT, HIVE_ROOT and ZOOKEEPER_ROOT set.
---
--- TODO: test gpdbwritable write/read when it is enabled.
--- TODO: test PB, AVRO, THRIFT when it is enabled (read only, with pre formatted files).
--- TODO: test protocol validator for pxf once written.
--- TODO: test parameter passing, filter passing
-
--- start_matchsubs
---                                                                                               
--- # create a match/subs expression to handle ip addresses that change
---
--- m/(ERROR|WARNING):.*remote component error.*\(\d+\).*from.*'\d+\.\d+\.\d+\.\d+:\d+'.*/
--- s/'\d+\.\d+\.\d+\.\d+:\d+'/'SOME_IP:SOME_PORT'/
---
--- m/(remote component error \(0\): Failed connect to @hostname@:12345; Connection refused|remote component error \(0\): couldn't connect to host).*/
--- s/(Failed connect to @hostname@:12345; Connection refused|couldn't connect to host)/CURL_CON_ERROR/
---
--- end_matchsubs
--- start_matchignore
---
--- m/.*Unable to load native-hadoop library for your platform.*/
---
--- end_matchignore
-
---
--- syntax validations
---
-CREATE READABLE EXTERNAL TABLE gphdfs_in(a int, b text, c bytea)
-LOCATION ('pxf://@hostname@:50070/somepath/gpdb_regression_data?FRAGMENTER=xfrag&ACCESSOR=xacc&RESOLVER=xres&someuseropt=someuserval')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import'); -- positive
-
-CREATE READABLE EXTERNAL TABLE gphdfs_in1(a int, b text, c bytea)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data?FRAGMENTER=xfrag&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.AvroResolver&DATA-SCHEMA=MySchema')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import'); -- positive
-
-CREATE READABLE EXTERNAL TABLE gphdfs_in2(a int, b text, c bytea)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/*')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import'); -- negative
-
-CREATE READABLE EXTERNAL TABLE gphdfs_in2(a int, b text, c bytea)
-LOCATION ('pxf://@hostname@:50070/somepath/gpdb_regression_data?ACCESSOR=xacc&RESOLVER=xres&someuseropt=someuserval')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import'); -- negative, missing fragmenter
-
-CREATE READABLE EXTERNAL TABLE gphdfs_in2(a int, b text, c bytea)
-LOCATION ('pxf://@hostname@:50070/somepath/gpdb_regression_data?FRAGMENTER=xfrag&RESOLVER=xres&someuseropt=someuserval')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import'); -- negative, missing accessor
-
-CREATE READABLE EXTERNAL TABLE gphdfs_in2(a int, b text, c bytea)
-LOCATION ('pxf://@hostname@:50070/somepath/gpdb_regression_data?FRAGMENTER=xfrag&ACCESSOR=xacc')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import'); -- negative, missing resolver
-
-DROP EXTERNAL TABLE gphdfs_in;
-DROP EXTERNAL TABLE gphdfs_in1;
-
---
--- Load HDFS with test data
---
-\! ${HADOOP_ROOT}/bin/hadoop fs -mkdir /gpdb_regression_data
-\! ${HADOOP_ROOT}/bin/hadoop fs -copyFromLocal @abs_srcdir@/data/pxf/writable_inside_sequence1.tbl /gpdb_regression_data/writable_inside_sequence.tbl
-\! ${HADOOP_ROOT}/bin/hadoop fs -copyFromLocal @abs_srcdir@/data/pxf/avro_inside_sequence.tbl /gpdb_regression_data/avro_inside_sequence.tbl; 
-\! ${HADOOP_ROOT}/bin/hadoop fs -copyFromLocal @abs_srcdir@/data/pxf/avroformat_inside_avrofile.avro /gpdb_regression_data/avroformat_inside_avrofile.avro
-\! ${HADOOP_ROOT}/bin/hadoop fs -copyFromLocal @abs_srcdir@/data/pxf/text_data.csv /gpdb_regression_data/text_data.csv
-\! ${HADOOP_ROOT}/bin/hadoop fs -mkdir /gpdb_regression_data/wild
-\! ${HADOOP_ROOT}/bin/hadoop fs -copyFromLocal @abs_srcdir@/data/pxf/writable_inside_sequence1.tbl /gpdb_regression_data/wild/writable_inside_sequence1.tbl
-\! ${HADOOP_ROOT}/bin/hadoop fs -copyFromLocal @abs_srcdir@/data/pxf/writable_inside_sequence2.tbl /gpdb_regression_data/wild/writable_inside_sequence2.tbl
-\! ${HADOOP_ROOT}/bin/hadoop fs -copyFromLocal @abs_srcdir@/data/pxf/empty.tbl /gpdb_regression_data/empty.tbl
-\! ${HADOOP_ROOT}/bin/hadoop fs -copyFromLocal @abs_srcdir@/data/pxf/small.csv /gpdb_regression_data/small.csv
---
--- Test TEXT format on a file with many fields
---
-CREATE EXTERNAL TABLE bigtext (s1 text, 
-                               s2 text, 
-							   s3 text, 
-							   d1 timestamp, 
-							   n1 int, 
-							   n2 int, 
-							   n3 int, 
-							   n4 int, 
-							   n5 int, 
-							   n6 int, 
-							   n7 int,
-                               s11 text, 
-							   s12 text, 
-							   s13 text, 
-							   d11 timestamp, 
-							   n11 int, 
-							   n12 int, 
-							   n13 int, 
-							   n14 int, 
-							   n15 int, 
-							   n16 int, 
-							   n17 int)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/text_data.csv?PROFILE=HdfsTextSimple')
-FORMAT 'TEXT' (DELIMITER ',');
-SELECT n1, n2, n3, n4, n5, s1, s2, s3, d1 FROM bigtext ORDER BY n1;
-SELECT n11, n12, n13, n14, n15, s11, s12, s13, d11 FROM bigtext ORDER BY n11;
-DROP EXTERNAL TABLE bigtext;
---
--- Test CSV format on a file with many fields
---
-CREATE EXTERNAL TABLE bigcsv (s1 text, 
-                               s2 text, 
-							   s3 text, 
-							   d1 timestamp, 
-							   n1 int, 
-							   n2 int, 
-							   n3 int, 
-							   n4 int, 
-							   n5 int, 
-							   n6 int, 
-							   n7 int,
-                               s11 text, 
-							   s12 text, 
-							   s13 text, 
-							   d11 timestamp, 
-							   n11 int, 
-							   n12 int, 
-							   n13 int, 
-							   n14 int, 
-							   n15 int, 
-							   n16 int, 
-							   n17 int)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/text_data.csv?PROFILE=HdfsTextSimple')
-FORMAT 'CSV';
-SELECT n1, n2, n3, n4, n5, s1, s2, s3, d1 FROM bigcsv ORDER BY n1;
-SELECT n11, n12, n13, n14, n15, s11, s12, s13, d11 FROM bigcsv ORDER BY n11;
-DROP EXTERNAL TABLE bigcsv;
---
--- Test TEXT format on a file with many fields with deprecated ACCESSOR TextFileAccessor and deprecated RESOLVER TextResolver
---
-CREATE EXTERNAL TABLE bigtext (s1 text, 
-                               s2 text, 
-							   s3 text, 
-							   d1 timestamp, 
-							   n1 int, 
-							   n2 int, 
-							   n3 int, 
-							   n4 int, 
-							   n5 int, 
-							   n6 int, 
-							   n7 int,
-                               s11 text, 
-							   s12 text, 
-							   s13 text, 
-							   d11 timestamp, 
-							   n11 int, 
-							   n12 int, 
-							   n13 int, 
-							   n14 int, 
-							   n15 int, 
-							   n16 int, 
-							   n17 int)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/text_data.csv?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.TextFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.TextResolver')
-FORMAT 'TEXT' (DELIMITER ',');
-SELECT n1, n2, n3, n4, n5, s1, s2, s3, d1 FROM bigtext ORDER BY n1;
-SELECT n11, n12, n13, n14, n15, s11, s12, s13, d11 FROM bigtext ORDER BY n11;
-DROP EXTERNAL TABLE bigtext;
---
--- Test TEXT format on a file with many fields with deprecated ACCESSOR LineReaderAccessor
---
-CREATE EXTERNAL TABLE linereader (s1 text, 
-                                  s2 text, 
-							      s3 text, 
-							      d1 timestamp, 
-							      n1 int, 
-							      n2 int, 
-							      n3 int, 
-							      n4 int, 
-							      n5 int, 
-							      n6 int, 
-							      n7 int,
-                                  s11 text, 
-							      s12 text, 
-							      s13 text, 
-							      d11 timestamp, 
-							      n11 int, 
-							      n12 int, 
-							      n13 int, 
-							      n14 int, 
-							      n15 int, 
-							      n16 int, 
-							      n17 int)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/text_data.csv?PROFILE=HdfsTextSimple')
-FORMAT 'TEXT' (DELIMITER ',');
-SELECT n1, n2, n3, n4, n5, s1, s2, s3, d1 FROM linereader ORDER BY n1;
-SELECT n11, n12, n13, n14, n15, s11, s12, s13, d11 FROM linereader ORDER BY n11;
-DROP EXTERNAL TABLE linereader;
---
--- Test Writable data inside a SequenceFile (read only).
---
-
-CREATE EXTERNAL TABLE seqwr(tmp1  timestamp, 
-                            num1  integer, 
-                            num2  integer, 
-                            num3  integer, 
-                            num4  integer,
-                            t1    text, 
-                            t2    text, 
-                            t3    text, 
-                            t4    text, 
-                            t5    text, 
-                            t6    text, 
-                            dub1  double precision, 
-                            dub2  double precision, 
-                            dub3  double precision, 
-                            ft1   real, 
-                            ft2   real, 
-                            ft3   real, 
-                            ln1   bigint, 
-                            ln2   bigint, 
-                            ln3   bigint, 
-                            bt    bytea,
-							bool1 boolean,
-							bool2 boolean,
-							bool3 boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable_inside_sequence.tbl?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT tmp1, num1, num2, num3, num4, ln1, ln2, ln3, bt, bool1, bool2, bool3 FROM seqwr ORDER BY num1;
-SELECT num1, t1, t2, t3 FROM seqwr ORDER BY num1;
-DROP EXTERNAL TABLE seqwr;
---
--- Test Avro data inside a SequenceFile (read only).
---
-CREATE EXTERNAL TABLE seqav(tmp1 timestamp, 
-                            num1  integer, 
-                            num2  integer, 
-                            num3  integer, 
-                            num4  integer,
-                            t1    text,
-                            t2    text,
-                            t3    text,
-                            t4    text,
-                            t5    text,
-                            t6    text,
-                            dub1  double precision,
-                            dub2  double precision, 
-                            dub3  double precision, 
-                            ft1   real, 
-                            ft2   real, 
-                            ft3   real, 
-                            ln1   bigint, 
-                            ln2   bigint, 
-                            ln3   bigint, 
-                            bt    bytea,
-							bl    boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/avro_inside_sequence.tbl?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.AvroResolver&DATA-SCHEMA=regressPXFCustomAvro.avsc')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT tmp1, num1, num2, num3, num4, ln1, ln2, ln3, bt, bl FROM seqav ORDER BY num1;
-SELECT num1, t1, t2, t3 FROM seqav ORDER BY num1;
-DROP EXTERNAL TABLE seqav;
---
--- Test file name with spaces.
---
-CREATE EXTERNAL TABLE seqav_space(tmp1 timestamp, 
-                            num1  integer, 
-                            num2  integer, 
-                            num3  integer, 
-                            num4  integer,
-                            t1    text,
-                            t2    text,
-                            t3    text,
-                            t4    text,
-                            t5    text,
-                            t6    text,
-                            dub1  double precision,
-                            dub2  double precision, 
-                            dub3  double precision, 
-                            ft1   real, 
-                            ft2   real, 
-                            ft3   real, 
-                            ln1   bigint, 
-                            ln2   bigint, 
-                            ln3   bigint, 
-                            bt    bytea,
-							bl    boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/avro_inside_sequence.tbl?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.AvroResolver&DATA-SCHEMA=regress PXF Custom Avro1.avsc')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT tmp1, num1, num2, num3, num4, ln1, ln2, ln3, bt, bl FROM seqav_space ORDER BY num1;
-DROP EXTERNAL TABLE seqav_space;
---
--- Test options are case insensitive
---
-CREATE EXTERNAL TABLE seqav_case(tmp1 timestamp, 
-                            num1  integer, 
-                            num2  integer, 
-                            num3  integer, 
-                            num4  integer,
-                            t1    text,
-                            t2    text,
-                            t3    text,
-                            t4    text,
-                            t5    text,
-                            t6    text,
-                            dub1  double precision,
-                            dub2  double precision, 
-                            dub3  double precision, 
-                            ft1   real, 
-                            ft2   real, 
-                            ft3   real, 
-                            ln1   bigint, 
-                            ln2   bigint, 
-                            ln3   bigint, 
-                            bt    bytea,
-							bl    boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/avro_inside_sequence.tbl?fragmenter=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&Accessor=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&ReSoLvEr=com.pivotal.pxf.plugins.hdfs.AvroResolver&Data-Schema=regressPXFCustomAvro.avsc')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT tmp1, num1, num2, num3, num4, ln1, ln2, ln3, bt, bl FROM seqav_case ORDER BY num1;
-DROP EXTERNAL TABLE seqav_case;
---
--- Test Avro data inside an AvroFile (read only).
---
-CREATE EXTERNAL TABLE avfav(tmp1 timestamp, 
-                            num1  integer, 
-                            num2  integer, 
-                            num3  integer, 
-                            num4  integer,
-                            t1    text,
-                            t2    text,
-                            t3    text,
-                            t4    text,
-                            t5    text,
-                            t6    text,
-                            dub1  double precision,
-                            dub2  double precision, 
-                            dub3  double precision, 
-                            ft1   real, 
-                            ft2   real, 
-                            ft3   real, 
-                            ln1   bigint, 
-                            ln2   bigint, 
-                            ln3   bigint, 
-                            bt    bytea,
-							bl    boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/avroformat_inside_avrofile.avro?PROFILE=Avro')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT tmp1, num1, num2, num3, num4, ln1, ln2, ln3, bt, bl FROM avfav ORDER BY num1;
-SELECT num1, t1, t2, t3 FROM avfav ORDER BY num1;
-DROP EXTERNAL TABLE avfav;
---
--- Test quoted line break
---
-CREATE EXTERNAL TABLE small_csv(num1 int, 
-                                word text, 
-                                num2 int)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/small.csv?PROFILE=HdfsTextMulti')
-FORMAT 'CSV';
-SELECT * FROM small_csv ORDER BY num1;
-DROP EXTERNAL TABLE small_csv;
---
--- Test wildcards in file name
---
-CREATE EXTERNAL TABLE seqwild(tmp1  timestamp, 
-                              num1  integer, 
-                              num2  integer, 
-                              num3  integer, 
-                              num4  integer,
-                              t1    text, 
-                              t2    text, 
-                              t3    text, 
-                              t4    text, 
-                              t5    text, 
-                              t6    text, 
-                              dub1  double precision, 
-                              dub2  double precision, 
-                              dub3  double precision, 
-                              ft1   real, 
-                              ft2   real, 
-                              ft3   real, 
-                              ln1   bigint, 
-                              ln2   bigint, 
-                              ln3   bigint, 
-                              bt    bytea,
-							  bool1 boolean,
-							  bool2 boolean,
-							  bool3 boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/wild/*.tbl?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT num1, t1, t2, t3 FROM seqwild ORDER BY num1;
-DROP EXTERNAL TABLE seqwild;
-CREATE EXTERNAL TABLE seqquestion(tmp1  timestamp, 
-                              num1  integer, 
-                              num2  integer, 
-                              num3  integer, 
-                              num4  integer,
-                              t1    text, 
-                              t2    text, 
-                              t3    text, 
-                              t4    text, 
-                              t5    text, 
-                              t6    text, 
-                              dub1  double precision, 
-                              dub2  double precision, 
-                              dub3  double precision, 
-                              ft1   real, 
-                              ft2   real, 
-                              ft3   real, 
-                              ln1   bigint, 
-                              ln2   bigint, 
-                              ln3   bigint, 
-                              bt    bytea,
-                              bool1 boolean,
-                              bool2 boolean,
-                              bool3 boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/wild/writable_inside_sequence?.tbl?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT num1, t1, t2, t3 FROM seqquestion ORDER BY num1;
-DROP EXTERNAL TABLE seqquestion;
---
--- Test error in host name -- negative
---
-CREATE EXTERNAL TABLE host_err(t1 text,
-                               a1 integer)
-LOCATION ('pxf://badhostname:50070/gpdb_regression_data/multiblock.tbl?PROFILE=HdfsTextSimple')
-FORMAT 'TEXT' (DELIMITER ',');
-SELECT t1, a1 FROM host_err ORDER BY t1 LIMIT 10; -- negative
-DROP EXTERNAL TABLE host_err;
---
--- Test error in port -- negative
---
-CREATE EXTERNAL TABLE port_err(t1 text,
-                               a1 integer)
-LOCATION ('pxf://@hostname@:12345/gpdb_regression_data/multiblock.tbl?PROFILE=HdfsTextSimple')
-FORMAT 'TEXT' (DELIMITER ',');
-SELECT t1, a1 FROM port_err ORDER BY t1 LIMIT 10; -- negative
-DROP EXTERNAL TABLE port_err;
---
--- Test empty file
---
-CREATE EXTERNAL TABLE empty(t1 text,
-                            a1 integer)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/empty.tbl?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT * FROM empty ORDER BY t1;
-DROP EXTERNAL TABLE empty;
---
--- Test ANALYZE for an HDFS file (read only).
---
-CREATE EXTERNAL TABLE avfav_analyze_good(tmp1 timestamp, 
-                            num1  integer, 
-                            num2  integer, 
-                            num3  integer, 
-                            num4  integer,
-                            t1    text,
-                            t2    text,
-                            t3    text,
-                            t4    text,
-                            t5    text,
-                            t6    text,
-                            dub1  double precision,
-                            dub2  double precision, 
-                            dub3  double precision, 
-                            ft1   real, 
-                            ft2   real, 
-                            ft3   real, 
-                            ln1   bigint, 
-                            ln2   bigint, 
-                            ln3   bigint, 
-                            bt    bytea)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/avroformat_inside_avrofile.avro?PROFILE=Avro')
-FORMAT 'custom' (formatter='pxfwritable_import');
-
--- Table that points to a wrong port.
-CREATE EXTERNAL TABLE avfav_analyze_bad_port(tmp1 timestamp, 
-                            num1  integer, 
-                            num2  integer, 
-                            num3  integer, 
-                            num4  integer,
-                            t1    text,
-                            t2    text,
-                            t3    text,
-                            t4    text,
-                            t5    text,
-                            t6    text,
-                            dub1  double precision,
-                            dub2  double precision, 
-                            dub3  double precision, 
-                            ft1   real, 
-                            ft2   real, 
-                            ft3   real, 
-                            ln1   bigint, 
-                            ln2   bigint, 
-                            ln3   bigint, 
-                            bt    bytea)
-LOCATION ('pxf://@hostname@:12345/gpdb_regression_data/avroformat_inside_avrofile.avro?PROFILE=Avro')
-FORMAT 'custom' (formatter='pxfwritable_import');
-
--- Table that points to a wrong analyzer class.
-CREATE EXTERNAL TABLE avfav_analyze_bad_class(tmp1 timestamp, 
-                            num1  integer, 
-                            num2  integer, 
-                            num3  integer, 
-                            num4  integer,
-                            t1    text,
-                            t2    text,
-                            t3    text,
-                            t4    text,
-                            t5    text,
-                            t6    text,
-                            dub1  double precision,
-                            dub2  double precision, 
-                            dub3  double precision, 
-                            ft1   real, 
-                            ft2   real, 
-                            ft3   real, 
-                            ln1   bigint, 
-                            ln2   bigint, 
-                            ln3   bigint, 
-                            bt    bytea)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/avroformat_inside_avrofile.avro?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.AvroFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.AvroResolver&ANALYZER=NoSuchAnalyzer')
-FORMAT 'custom' (formatter='pxfwritable_import');
-
--- verify that default stats remain after ANALYZE with GUC off
-SET pxf_enable_stat_collection = false;
-ANALYZE avfav_analyze_good;
-SELECT COUNT(*) FROM pg_class WHERE relname = 'avfav_analyze_good' AND relpages = 1000 AND reltuples = 1000000;
-
--- verify that stats get updated after ANALYZE with GUC on
--- NOTE: we can't guarantee the same results on each machine. We just check that the defaults were changed.
-SET pxf_enable_stat_collection = true;
-ANALYZE avfav_analyze_good;
-SELECT COUNT(*) FROM pg_class WHERE relname = 'avfav_analyze_good' 
-                              AND relpages != 1000 AND relpages > 0
-                              AND reltuples != 1000000 AND reltuples > 0;
-
--- verify that stats stay updated to most recent value after ANALYZE with GUC off
-SET pxf_enable_stat_collection = false;
-ANALYZE avfav_analyze_good;
-SELECT COUNT(*) FROM pg_class WHERE relname = 'avfav_analyze_good' 
-                              AND relpages != 1000 AND relpages > 0
-                              AND reltuples != 1000000 AND reltuples > 0;
-
--- verify that ANALYZE doesn't break when run on a table that can't connect to the analyzer module
--- TODO: find a way to verify that previous stat values remain.
-SET pxf_enable_stat_collection = true;
-ANALYZE avfav_analyze_bad_port;
-SELECT COUNT(*) FROM pg_class WHERE relname = 'avfav_analyze_bad_port' AND relpages = 1000 AND reltuples = 1000000;
-ANALYZE avfav_analyze_bad_class;
-SELECT COUNT(*) FROM pg_class WHERE relname = 'avfav_analyze_bad_class' AND relpages = 1000 AND reltuples = 1000000;
-
-SET pxf_enable_stat_collection = true; -- reset to default
-
-DROP EXTERNAL TABLE avfav_analyze_good;
-DROP EXTERNAL TABLE avfav_analyze_bad_port;
-DROP EXTERNAL TABLE avfav_analyze_bad_class;
-
--- verify pg_remote_credentials exists with the expected structure
-SELECT * FROM pg_remote_credentials;
-
---
--- Cleanup: delete all data that was copied into HDFS
---
--- start_ignore
-\! ${HADOOP_ROOT}/bin/hadoop fs -rm -r /gpdb_regression_data
--- end_ignore
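
A minimal sketch of the pattern the ANALYZE tests above rely on: with the pxf_enable_stat_collection GUC off, ANALYZE leaves the external-table defaults (relpages = 1000, reltuples = 1000000) in pg_class, and with it on, the values are replaced by PXF-provided estimates. The GUC name, the defaults, and the pg_class check come from the removed suite itself; the table name, host, port, path, and column list below are placeholders and would have to match real data.

CREATE EXTERNAL TABLE stats_demo(id integer, val text)
LOCATION ('pxf://namenode:50070/some/path/data.avro?PROFILE=Avro')
FORMAT 'custom' (formatter='pxfwritable_import');

SET pxf_enable_stat_collection = false;
ANALYZE stats_demo;  -- defaults (relpages = 1000, reltuples = 1000000) stay in place
SET pxf_enable_stat_collection = true;
ANALYZE stats_demo;  -- relpages/reltuples now reflect PXF-provided estimates
SELECT relpages, reltuples FROM pg_class WHERE relname = 'stats_demo';
DROP EXTERNAL TABLE stats_demo;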

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7eeeec9d/src/test/regress/input/pxf_hdfs_ext.source
----------------------------------------------------------------------
diff --git a/src/test/regress/input/pxf_hdfs_ext.source b/src/test/regress/input/pxf_hdfs_ext.source
deleted file mode 100644
index ccb78a0..0000000
--- a/src/test/regress/input/pxf_hdfs_ext.source
+++ /dev/null
@@ -1,76 +0,0 @@
---
--- PXF HDFS extended regression suite
---
--- Prerequisites:
---
---   Must have a running HDFS with its REST service on port 50070
---   Must have HADOOP_ROOT, HBASE_ROOT, HIVE_ROOT and ZOOKEEPER_ROOT set.
---
--- TODO: test gpdbwritable write/read when it is enabled.
--- TODO: test PB, AVRO, THRIFT when they are enabled (read only, with pre-formatted files).
--- TODO: test protocol validator for pxf once written.
--- TODO: test parameter passing, filter passing
-
--- start_matchsubs
---
--- # create a match/subs expression to handle ip addresses that change
---
--- m/(ERROR|WARNING):.*remote component error.*\(\d+\).*from.*'\d+\.\d+\.\d+\.\d+:\d+'.*/
--- s/'\d+\.\d+\.\d+\.\d+:\d+'/'SOME_IP:SOME_PORT'/
---
--- end_matchsubs
--- start_matchignore
---
--- m/.*Unable to load native-hadoop library for your platform.*/
---
--- end_matchignore
---------------------------------------------------------------------------------
--- GPHDFS
---------------------------------------------------------------------------------
---
--- Load HDFS with test data
---
-\! ${HADOOP_ROOT}/bin/hadoop fs -mkdir /gpdb_regression_data
-\! sh @abs_srcdir@/helpers/create_table_file.sh @abs_srcdir@/data/pxf/multiblock.tbl
-\! ${HADOOP_ROOT}/bin/hadoop fs -copyFromLocal @abs_srcdir@/data/pxf/multiblock.tbl /gpdb_regression_data/multiblock.tbl
-
---
--- Test multiblock file
---
-CREATE EXTERNAL TABLE mbt(t1 text,
-                          a1 integer)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/multiblock.tbl?PROFILE=HdfsTextSimple')
-FORMAT 'TEXT' (DELIMITER ',');
-SELECT t1, a1 FROM mbt ORDER BY t1 LIMIT 10;
-SELECT SUM(a1) from mbt;
-SELECT COUNT(*) FROM mbt;
-SELECT cnt < 32768000 AS check FROM (SELECT COUNT(*) AS cnt FROM mbt WHERE gp_segment_id = 0) AS a;
-DROP EXTERNAL TABLE mbt;
---
--- Test extensibility - read
---
-CREATE EXTERNAL TABLE extens(num1   integer,
-                             t1      text,
-                             num2    integer)
-LOCATION ('pxf://@hostname@:50070/regression_location?FRAGMENTER=DummyFragmenter&ACCESSOR=DummyAccessor&RESOLVER=DummyResolver&ANALYZER=DummyAnalyzer')
-FORMAT 'custom' (formatter = 'pxfwritable_import');
-
-SELECT num1, t1 FROM extens ORDER BY num1, t1;
-ANALYZE extens;
-DROP EXTERNAL TABLE extens;
---
--- Test extensibility - write
---
-CREATE WRITABLE EXTERNAL TABLE extens_write(t1 text, t2 text)
-LOCATION ('pxf://@hostname@:50070/regression_location?ACCESSOR=DummyAccessor&RESOLVER=DummyResolver')
-FORMAT 'custom' (formatter = 'pxfwritable_export');
-
-INSERT INTO extens_write VALUES ('something', 'big'), ('is', 'going'), ('to', 'happen');
-DROP EXTERNAL TABLE extens_write;
---
--- Cleanup: delete all data that was copied into HDFS
---
--- start_ignore
-\! ${HADOOP_ROOT}/bin/hadoop fs -rm -r /gpdb_regression_data
-\! rm @abs_srcdir@/data/pxf/multiblock.tbl
--- end_ignore
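
As the removed tests illustrate, a PXF location URI can name either a built-in PROFILE or the individual FRAGMENTER, ACCESSOR, and RESOLVER classes. A hedged sketch of the two forms for Avro data, presumed equivalent here and built from the class names that appear in the removed tests; host, port, path, and columns are placeholders:

CREATE EXTERNAL TABLE avro_via_profile(num1 integer, t1 text)
LOCATION ('pxf://namenode:50070/some/path/data.avro?PROFILE=Avro')
FORMAT 'custom' (formatter='pxfwritable_import');

CREATE EXTERNAL TABLE avro_via_plugins(num1 integer, t1 text)
LOCATION ('pxf://namenode:50070/some/path/data.avro?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.AvroFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.AvroResolver')
FORMAT 'custom' (formatter='pxfwritable_import');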

