Subject: svn commit: r806917 - in /hadoop/hive/branches/branch-0.4: ./ jdbc/src/java/org/apache/hadoop/hive/jdbc/ jdbc/src/test/org/apache/hadoop/hive/jdbc/
Date: Sun, 23 Aug 2009 01:45:24 -0000
To: hive-commits@hadoop.apache.org
From: rmurthy@apache.org

Author: rmurthy
Date: Sun Aug 23 01:45:24 2009
New Revision: 806917

URL: http://svn.apache.org/viewvc?rev=806917&view=rev
Log:
HIVE-679. Adding the basic JDBC methods to allow querying using the
SQuirrelSQL tool. (Bill Graham via rmurthy)

Modified:
    hadoop/hive/branches/branch-0.4/CHANGES.txt
    hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java
    hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java
    hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java
    hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSet.java
    hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java
    hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java
    hadoop/hive/branches/branch-0.4/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java

Modified: hadoop/hive/branches/branch-0.4/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/CHANGES.txt?rev=806917&r1=806916&r2=806917&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/CHANGES.txt (original)
+++ hadoop/hive/branches/branch-0.4/CHANGES.txt Sun Aug 23 01:45:24 2009
@@ -220,6 +220,9 @@
     HIVE-760. Add version info to META-INF/MANIFEST.MF
     (Bill Graham via rmurthy)
 
+    HIVE-679. Adding the basic JDBC methods to allow querying using the
+    SQuirrelSQL tool. (Bill Graham via rmurthy)
+
   OPTIMIZATIONS
 
     HIVE-279. Predicate Pushdown support (Prasad Chakka via athusoo).
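The patch is easiest to understand from the client's point of view. A minimal
sketch of the end-to-end usage it enables (the connection details and table
name are illustrative, not part of this commit):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveJdbcExample {
  public static void main(String[] args) throws Exception {
    // Register the driver, then connect to a Hive server; 10000 is the
    // driver's default port, "default" the default database.
    Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
    Connection con = DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "");

    Statement stmt = con.createStatement();
    ResultSet res = stmt.executeQuery("select key, value from src"); // hypothetical table
    while (res.next()) {
      System.out.println(res.getInt(1) + "\t" + res.getString(2));
    }

    stmt.close();
    con.close();
  }
}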
Modified: hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java?rev=806917&r1=806916&r2=806917&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java (original)
+++ hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java Sun Aug 23 01:45:24 2009
@@ -49,7 +49,10 @@
 public class HiveConnection implements java.sql.Connection {
   JdbcSessionState session;
+  private TTransport transport;
   private HiveInterface client;
+  boolean isClosed = true;
+  SQLWarning warningChain = null;
   private static final String URI_PREFIX = "jdbc:hive://";
 
   /**
@@ -86,11 +89,12 @@
       } catch (Exception e) {
       }
 
-      TTransport transport = new TSocket(host, port);
+      transport = new TSocket(host, port);
       TProtocol protocol = new TBinaryProtocol(transport);
       client = new HiveClient(protocol);
       transport.open();
     }
+    isClosed = false;
   }
 
   /* (non-Javadoc)
@@ -98,8 +102,7 @@
    */
   public void clearWarnings() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    this.warningChain = null;
   }
 
   /* (non-Javadoc)
@@ -107,8 +110,12 @@
    */
   public void close() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    try {
+      if (transport != null) transport.close();
+    }
+    finally {
+      isClosed = true;
+    }
   }
 
   /* (non-Javadoc)
@@ -173,6 +180,7 @@
    */
   public Statement createStatement() throws SQLException {
+    if (isClosed) throw new SQLException("Can't create Statement, connection is closed");
     return new HiveStatement(session, client);
   }
 
@@ -283,8 +291,7 @@
    */
   public SQLWarning getWarnings() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return this.warningChain;
   }
 
   /* (non-Javadoc)
@@ -292,8 +299,7 @@
    */
   public boolean isClosed() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return isClosed;
   }
 
   /* (non-Javadoc)
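The connection now tracks its own lifecycle. A short sketch of the contract
the hunks above establish, mirroring the assertions in TestJdbcDriver below:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class ConnectionCloseDemo {
  public static void main(String[] args) throws Exception {
    Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
    Connection con = DriverManager.getConnection("jdbc:hive://", "", "");

    Statement stmt = con.createStatement(); // allowed: connection is open
    stmt.close();

    con.close();                            // closes the Thrift transport, flips isClosed
    System.out.println(con.isClosed());     // prints "true"

    try {
      con.createStatement();                // rejected after close()
    } catch (SQLException expected) {
      System.out.println(expected.getMessage());
    }
  }
}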
Modified: hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java?rev=806917&r1=806916&r2=806917&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java (original)
+++ hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java Sun Aug 23 01:45:24 2009
@@ -22,6 +22,10 @@
 import java.sql.ResultSet;
 import java.sql.RowIdLifetime;
 import java.sql.SQLException;
+import java.net.URL;
+import java.util.jar.Manifest;
+import java.util.jar.Attributes;
+import java.io.IOException;
 
 public class HiveDatabaseMetaData implements java.sql.DatabaseMetaData {
 
@@ -216,8 +220,7 @@
    */
   public String getDatabaseProductName() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return "Hive";
   }
 
   /* (non-Javadoc)
@@ -225,8 +228,7 @@
    */
   public String getDatabaseProductVersion() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return "0";
   }
 
   /* (non-Javadoc)
@@ -261,7 +263,7 @@
    */
   public String getDriverName() throws SQLException {
-    return new String("hive");
+    return fetchManifestAttribute(Attributes.Name.IMPLEMENTATION_TITLE);
   }
 
   /* (non-Javadoc)
@@ -269,7 +271,7 @@
    */
   public String getDriverVersion() throws SQLException {
-    return new String("0");
+    return fetchManifestAttribute(Attributes.Name.IMPLEMENTATION_VERSION);
   }
 
   /* (non-Javadoc)
@@ -581,8 +583,8 @@
   public ResultSet getProcedures(String catalog, String schemaPattern,
       String procedureNamePattern) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    //TODO: return empty result set here
+    return null;
   }
 
   /* (non-Javadoc)
@@ -1065,8 +1067,7 @@
    */
   public boolean supportsCatalogsInTableDefinitions() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return false;
   }
 
   /* (non-Javadoc)
@@ -1275,8 +1276,7 @@
    */
   public boolean supportsMultipleResultSets() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return false;
   }
 
   /* (non-Javadoc)
@@ -1419,8 +1419,7 @@
    */
   public boolean supportsSchemasInDataManipulation() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return false;
   }
 
   /* (non-Javadoc)
@@ -1455,8 +1454,7 @@
    */
   public boolean supportsSchemasInTableDefinitions() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return false;
   }
 
   /* (non-Javadoc)
@@ -1491,8 +1489,7 @@
    */
   public boolean supportsStoredProcedures() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return false;
   }
 
   /* (non-Javadoc)
@@ -1622,4 +1619,44 @@
     throw new SQLException("Method not supported");
   }
 
+  /**
+   * Manifest attributes, lazily loaded as needed.
+   */
+  private static Attributes manifestAttributes = null;
+
+  /**
+   * Loads the manifest attributes from the jar.
+   * @throws java.net.MalformedURLException
+   * @throws IOException
+   */
+  private synchronized void loadManifestAttributes() throws IOException {
+    if(manifestAttributes != null) return;
+    Class clazz = this.getClass();
+    String classContainer = clazz.getProtectionDomain().getCodeSource().getLocation().toString();
+    URL manifestUrl = new URL("jar:" + classContainer + "!/META-INF/MANIFEST.MF");
+    Manifest manifest = new Manifest(manifestUrl.openStream());
+    manifestAttributes = manifest.getMainAttributes();
+  }
+
+  /**
+   * Helper to initialize the manifest attributes and return one of them.
+   *
+   * @param attributeName
+   * @return
+   * @throws SQLException
+   */
+  private String fetchManifestAttribute(Attributes.Name attributeName) throws SQLException {
+    try {
+      loadManifestAttributes();
+    } catch (IOException e) {
+      throw new SQLException("Couldn't load manifest attributes.", e);
+    }
+    return manifestAttributes.getValue(attributeName);
+  }
+
+  public static void main(String[] args) throws SQLException {
+    HiveDatabaseMetaData meta = new HiveDatabaseMetaData();
+    System.out.println("DriverName: " + meta.getDriverName());
+    System.out.println("DriverVersion: " + meta.getDriverVersion());
+  }
 }
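getDriverName() and getDriverVersion() now resolve Implementation-Title and
Implementation-Version from the jar manifest populated by HIVE-760. The same
lookup technique in a standalone sketch (this assumes the class of interest
was loaded from a jar; otherwise the jar: URL cannot be opened):

import java.net.URL;
import java.util.jar.Attributes;
import java.util.jar.Manifest;

public class ManifestPeek {
  public static void main(String[] args) throws Exception {
    // Locate the jar this class was loaded from...
    Class clazz = ManifestPeek.class;
    String container = clazz.getProtectionDomain().getCodeSource().getLocation().toString();
    // ...and read the main attributes of its MANIFEST.MF.
    URL url = new URL("jar:" + container + "!/META-INF/MANIFEST.MF");
    Attributes attrs = new Manifest(url.openStream()).getMainAttributes();
    System.out.println("Title:   " + attrs.getValue(Attributes.Name.IMPLEMENTATION_TITLE));
    System.out.println("Version: " + attrs.getValue(Attributes.Name.IMPLEMENTATION_VERSION));
  }
}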
Modified: hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java?rev=806917&r1=806916&r2=806917&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java (original)
+++ hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java Sun Aug 23 01:45:24 2009
@@ -50,6 +50,31 @@
   private static final boolean JDBC_COMPLIANT = false;
 
   /**
+   * The required prefix for the connection url
+   */
+  private static final String URL_PREFIX = "jdbc:hive://";
+
+  /**
+   * The default port, used when a host is provided without a port
+   */
+  private static final String DEFAULT_PORT = "10000";
+
+  /**
+   * Property key for the database name
+   */
+  private static final String DBNAME_PROPERTY_KEY = "DBNAME";
+
+  /**
+   * Property key for the Hive Server host
+   */
+  private static final String HOST_PROPERTY_KEY = "HOST";
+
+  /**
+   * Property key for the Hive Server port
+   */
+  private static final String PORT_PROPERTY_KEY = "PORT";
+
+  /**
    *
    */
   public HiveDriver() {
@@ -75,7 +100,7 @@
    */
   public boolean acceptsURL(String url) throws SQLException {
-    return Pattern.matches("jdbc:hive://", url);
+    return Pattern.matches(URL_PREFIX, url);
   }
 
@@ -103,11 +128,38 @@
     return MINOR_VERSION;
   }
 
   public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    if (info == null) {
+      info = new Properties();
+    }
+
+    if ((url != null) && url.startsWith(URL_PREFIX)) {
+      info = parseURL(url, info);
+    }
+
+    DriverPropertyInfo hostProp = new DriverPropertyInfo(HOST_PROPERTY_KEY,
+        info.getProperty(HOST_PROPERTY_KEY, ""));
+    hostProp.required = false;
+    hostProp.description = "Hostname of Hive Server";
+
+    DriverPropertyInfo portProp = new DriverPropertyInfo(PORT_PROPERTY_KEY,
+        info.getProperty(PORT_PROPERTY_KEY, ""));
+    portProp.required = false;
+    portProp.description = "Port number of Hive Server";
+
+    DriverPropertyInfo dbProp = new DriverPropertyInfo(DBNAME_PROPERTY_KEY,
+        info.getProperty(DBNAME_PROPERTY_KEY, "default"));
+    dbProp.required = false;
+    dbProp.description = "Database name";
+
+    DriverPropertyInfo[] dpi = new DriverPropertyInfo[3];
+
+    dpi[0] = hostProp;
+    dpi[1] = portProp;
+    dpi[2] = dbProp;
+
+    return dpi;
   }
 
   /**
@@ -118,4 +170,49 @@
     return JDBC_COMPLIANT;
   }
 
+  /**
+   * Takes a url in the form of jdbc:hive://[hostname]:[port]/[db_name] and parses it.
+   * Everything after jdbc:hive:// is optional.
+   *
+   * @param url
+   * @param defaults
+   * @return
+   * @throws java.sql.SQLException
+   */
+  private Properties parseURL(String url, Properties defaults)
+      throws java.sql.SQLException {
+    Properties urlProps = (defaults != null) ? new Properties(defaults)
+        : new Properties();
+
+    if (url == null || !url.startsWith(URL_PREFIX)) {
+      throw new SQLException("Invalid connection url: " + url);
+    }
+
+    if (url.length() <= URL_PREFIX.length()) return urlProps;
+
+    // [hostname]:[port]/[db_name]
+    String connectionInfo = url.substring(URL_PREFIX.length());
+
+    // [hostname]:[port] [db_name]
+    String[] hostPortAndDatabase = connectionInfo.split("/", 2);
+
+    // [hostname]:[port]
+    if (hostPortAndDatabase[0].length() > 0) {
+      String[] hostAndPort = hostPortAndDatabase[0].split(":", 2);
+      urlProps.put(HOST_PROPERTY_KEY, hostAndPort[0]);
+      if (hostAndPort.length > 1) {
+        urlProps.put(PORT_PROPERTY_KEY, hostAndPort[1]);
+      }
+      else {
+        urlProps.put(PORT_PROPERTY_KEY, DEFAULT_PORT);
+      }
+    }
+
+    // [db_name]
+    if (hostPortAndDatabase.length > 1) {
+      urlProps.put(DBNAME_PROPERTY_KEY, hostPortAndDatabase[1]);
+    }
+
+    return urlProps;
+  }
 }
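parseURL() is private, but its behavior is observable through
getPropertyInfo(). A sketch of the defaults it fills in for a few URL shapes,
matching the URL_PROPERTIES fixtures in the test at the end of this commit
(the database name "mydb" is illustrative):

import java.sql.DriverPropertyInfo;

import org.apache.hadoop.hive.jdbc.HiveDriver;

public class UrlParseDemo {
  public static void main(String[] args) throws Exception {
    HiveDriver driver = new HiveDriver();
    String[] urls = {
        "jdbc:hive://",               // host "", port "", db "default"
        "jdbc:hive://localhost/mydb", // host "localhost", port "10000" (default), db "mydb"
        "jdbc:hive://foo:1243"        // host "foo", port "1243", db "default"
    };
    for (String url : urls) {
      for (DriverPropertyInfo dpi : driver.getPropertyInfo(url, null)) {
        System.out.println(url + " -> " + dpi.name + "=" + dpi.value);
      }
    }
  }
}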
Modified: hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSet.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSet.java?rev=806917&r1=806916&r2=806917&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSet.java (original)
+++ hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSet.java Sun Aug 23 01:45:24 2009
@@ -58,20 +58,31 @@
   List columnNames;
   List columnTypes;
 
+  SQLWarning warningChain = null;
+  boolean wasNull = false;
+  int maxRows = 0;
+  int rowsFetched = 0;
+
   /**
    *
    */
   @SuppressWarnings("unchecked")
-  public HiveResultSet(HiveInterface client) {
+  public HiveResultSet(HiveInterface client, int maxRows) throws SQLException {
     this.client = client;
     this.row = new ArrayList();
+    this.maxRows = maxRows;
     initDynamicSerde();
   }
 
+  @SuppressWarnings("unchecked")
+  public HiveResultSet(HiveInterface client) throws SQLException {
+    this(client, 0);
+  }
+
   /**
    * Instantiate the dynamic serde used to deserialize the result row
    */
-  public void initDynamicSerde() {
+  public void initDynamicSerde() throws SQLException {
     try {
       Schema fullSchema = client.getThriftSchema();
       List schema = fullSchema.getFieldSchemas();
@@ -79,12 +90,14 @@
       columnTypes = new ArrayList();
 
       String serDDL;
 
       if ((schema != null) && (!schema.isEmpty())) {
         serDDL = new String("struct result { ");
         for (int pos = 0; pos < schema.size(); pos++) {
           if (pos != 0) serDDL = serDDL.concat(",");
+          columnTypes.add(schema.get(pos).getType());
+          columnNames.add(schema.get(pos).getName());
           serDDL = serDDL.concat(schema.get(pos).getType());
           serDDL = serDDL.concat(" ");
           serDDL = serDDL.concat(schema.get(pos).getName());
@@ -104,8 +117,7 @@
       ds.initialize(new Configuration(), dsp);
     } catch (Exception ex) {
       ex.printStackTrace();
-      System.exit(1);
-      // TODO: Decide what to do here.
+      throw new SQLException("Could not create ResultSet: " + ex.getMessage());
     }
   }
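The loop above now also records column names and Thrift types for the
metadata calls while it builds the DynamicSerDe DDL string. A standalone
sketch of the string produced for a two-column result; the closing brace is
appended outside the hunk shown, so that detail is an assumption:

public class SerDdlSketch {
  public static void main(String[] args) {
    // {thrift type, column name} pairs, e.g. for "select key, value from src"
    String[][] schema = { { "i32", "key" }, { "string", "value" } };

    String serDDL = new String("struct result { ");
    for (int pos = 0; pos < schema.length; pos++) {
      if (pos != 0) serDDL = serDDL.concat(",");
      serDDL = serDDL.concat(schema[pos][0]);
      serDDL = serDDL.concat(" ");
      serDDL = serDDL.concat(schema[pos][1]);
    }
    serDDL = serDDL.concat("}"); // assumed: termination is not part of the hunk shown

    System.out.println(serDDL);  // struct result { i32 key,string value}
  }
}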
+ throw new SQLException("Could not create ResultSet: " + ex.getMessage()); } } @@ -150,8 +162,7 @@ */ public void clearWarnings() throws SQLException { - // TODO Auto-generated method stub - throw new SQLException("Method not supported"); + warningChain = null; } /* (non-Javadoc) @@ -305,7 +316,7 @@ */ public boolean getBoolean(int columnIndex) throws SQLException { - Object obj = row.get(columnIndex-1); + Object obj = getObject(columnIndex); if (Number.class.isInstance(obj)) { return ((Number)obj).intValue() != 0; } @@ -326,7 +337,7 @@ */ public byte getByte(int columnIndex) throws SQLException { - Object obj = row.get(columnIndex-1); + Object obj = getObject(columnIndex); if (Number.class.isInstance(obj)) { return ((Number)obj).byteValue(); } @@ -419,8 +430,11 @@ */ public Date getDate(int columnIndex) throws SQLException { + Object obj = getObject(columnIndex); + if (obj == null) return null; + try { - return Date.valueOf((String)row.get(columnIndex-1)); + return Date.valueOf((String)obj); } catch (Exception e) { throw new SQLException("Cannot convert column " + columnIndex + " to date: " + e.toString()); @@ -460,7 +474,7 @@ public double getDouble(int columnIndex) throws SQLException { try { - Object obj = row.get(columnIndex-1); + Object obj = getObject(columnIndex); if (Number.class.isInstance(obj)) { return ((Number)obj).doubleValue(); } @@ -504,7 +518,7 @@ public float getFloat(int columnIndex) throws SQLException { try { - Object obj = row.get(columnIndex-1); + Object obj = getObject(columnIndex); if (Number.class.isInstance(obj)) { return ((Number)obj).floatValue(); } @@ -538,7 +552,7 @@ public int getInt(int columnIndex) throws SQLException { try { - Object obj = row.get(columnIndex-1); + Object obj = getObject(columnIndex); if (Number.class.isInstance(obj)) { return ((Number)obj).intValue(); } @@ -564,7 +578,7 @@ public long getLong(int columnIndex) throws SQLException { try { - Object obj = row.get(columnIndex-1); + Object obj = getObject(columnIndex); if (Number.class.isInstance(obj)) { return ((Number)obj).longValue(); } @@ -651,7 +665,18 @@ */ public Object getObject(int columnIndex) throws SQLException { + if (row == null) { + throw new SQLException("No row found."); + } + + if (columnIndex > row.size()) { + throw new SQLException("Invalid columnIndex: " + columnIndex); + } + try { + this.wasNull = false; + if (row.get(columnIndex-1) == null) this.wasNull = true; + return row.get(columnIndex-1); } catch (Exception e) { @@ -757,7 +782,7 @@ public short getShort(int columnIndex) throws SQLException { try { - Object obj = row.get(columnIndex-1); + Object obj = getObject(columnIndex); if (Number.class.isInstance(obj)) { return ((Number)obj).shortValue(); } @@ -793,7 +818,10 @@ public String getString(int columnIndex) throws SQLException { // Column index starts from 1, not 0. 
@@ -757,7 +782,7 @@
   public short getShort(int columnIndex) throws SQLException {
     try {
-      Object obj = row.get(columnIndex-1);
+      Object obj = getObject(columnIndex);
       if (Number.class.isInstance(obj)) {
         return ((Number)obj).shortValue();
       }
@@ -793,7 +818,10 @@
   public String getString(int columnIndex) throws SQLException {
     // Column index starts from 1, not 0.
-    return row.get(columnIndex - 1).toString();
+    Object obj = getObject(columnIndex);
+    if (obj == null) return null;
+
+    return obj.toString();
   }
 
   /* (non-Javadoc)
@@ -929,8 +957,7 @@
    */
   public SQLWarning getWarnings() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return warningChain;
   }
 
   /* (non-Javadoc)
@@ -1022,9 +1049,12 @@
    */
   public boolean next() throws SQLException {
+    if (this.maxRows > 0 && this.rowsFetched >= this.maxRows) return false;
+
     String row_str = "";
     try {
       row_str = (String)client.fetchOne();
+      this.rowsFetched++;
       if (!row_str.equals("")) {
         Object o = ds.deserialize(new BytesWritable(row_str.getBytes()));
         row = (ArrayList)o;
@@ -1903,8 +1933,7 @@
    */
   public boolean wasNull() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return this.wasNull;
   }
 
   /* (non-Javadoc)
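next() now stops once rowsFetched reaches the statement's maxRows cap
(0 means unlimited). A sketch of the resulting client-side behavior
(table name illustrative):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class MaxRowsDemo {
  public static void main(String[] args) throws Exception {
    Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
    Connection con = DriverManager.getConnection("jdbc:hive://", "", "");
    Statement stmt = con.createStatement();
    stmt.setMaxRows(10);                    // 0, the default, means "all rows"

    ResultSet res = stmt.executeQuery("select * from src"); // hypothetical table
    int n = 0;
    while (res.next()) {
      n++;
    }
    System.out.println(n);                  // at most 10

    con.close();
  }
}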
Modified: hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java?rev=806917&r1=806916&r2=806917&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java (original)
+++ hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java Sun Aug 23 01:45:24 2009
@@ -18,13 +18,17 @@
 
 package org.apache.hadoop.hive.jdbc;
 
+import org.apache.hadoop.hive.serde.Constants;
+
 import java.sql.SQLException;
+import java.sql.ResultSetMetaData;
+import java.sql.Types;
 import java.util.List;
 
 public class HiveResultSetMetaData implements java.sql.ResultSetMetaData {
   List columnNames;
   List columnTypes;
 
   public HiveResultSetMetaData(List columnNames, List columnTypes) {
     this.columnNames = columnNames;
     this.columnTypes = columnTypes;
@@ -61,8 +65,22 @@
    */
   public int getColumnDisplaySize(int column) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+
+    // taking a stab at appropriate values
+    switch(getColumnType(column)) {
+    case Types.VARCHAR:
+    case Types.BIGINT:
+      return 32;
+    case Types.TINYINT:
+      return 2;
+    case Types.BOOLEAN:
+      return 8;
+    case Types.DOUBLE:
+    case Types.INTEGER:
+      return 16;
+    default:
+      return 32;
+    }
   }
 
   /* (non-Javadoc)
@@ -87,8 +105,31 @@
    */
   public int getColumnType(int column) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    if (columnTypes == null)
+      throw new SQLException("Could not determine column type name for ResultSet");
+
+    if (column < 1 || column > columnTypes.size())
+      throw new SQLException("Invalid column value: " + column);
+
+    String type = columnTypes.get(column-1);
+
+    // we need to convert the thrift type to the SQL type
+    //TODO: this would be better handled in an enum
+    if ("string".equals(type))
+      return Types.VARCHAR;
+    else if ("bool".equals(type))
+      return Types.BOOLEAN;
+    else if ("double".equals(type))
+      return Types.DOUBLE;
+    else if ("byte".equals(type))
+      return Types.TINYINT;
+    else if ("i32".equals(type))
+      return Types.INTEGER;
+    else if ("i64".equals(type))
+      return Types.BIGINT;
+
+    throw new SQLException("Unrecognized column type: " + type);
   }
 
   /* (non-Javadoc)
@@ -96,7 +137,30 @@
    */
   public String getColumnTypeName(int column)
       throws SQLException {
-    return columnTypes.get(column-1);
+    if (columnTypes == null)
+      throw new SQLException("Could not determine column type name for ResultSet");
+
+    if (column < 1 || column > columnTypes.size())
+      throw new SQLException("Invalid column value: " + column);
+
+    // we need to convert the thrift type to the SQL type name
+    //TODO: this would be better handled in an enum
+    String type = columnTypes.get(column-1);
+    if ("string".equals(type))
+      return Constants.STRING_TYPE_NAME;
+    else if ("double".equals(type))
+      return Constants.DOUBLE_TYPE_NAME;
+    else if ("bool".equals(type))
+      return Constants.BOOLEAN_TYPE_NAME;
+    else if ("byte".equals(type))
+      return Constants.TINYINT_TYPE_NAME;
+    else if ("i32".equals(type))
+      return Constants.INT_TYPE_NAME;
+    else if ("i64".equals(type))
+      return Constants.BIGINT_TYPE_NAME;
+
+    throw new SQLException("Unrecognized column type: " + type);
   }
 
   /* (non-Javadoc)
@@ -104,8 +168,9 @@
    */
   public int getPrecision(int column) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    if (Types.DOUBLE == getColumnType(column)) return -1; //Do we have a precision limit?
+
+    return 0;
   }
 
   /* (non-Javadoc)
@@ -113,8 +178,9 @@
    */
   public int getScale(int column) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    if (Types.DOUBLE == getColumnType(column)) return -1; //Do we have a scale limit?
+
+    return 0;
   }
 
   /* (non-Javadoc)
@@ -140,8 +206,8 @@
    */
   public boolean isAutoIncrement(int column) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    // Hive doesn't have an auto-increment concept
+    return false;
  }
 
   /* (non-Javadoc)
@@ -158,8 +224,8 @@
    */
   public boolean isCurrency(int column) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    // Hive doesn't support a currency type
+    return false;
   }
 
   /* (non-Javadoc)
@@ -176,8 +242,8 @@
    */
   public int isNullable(int column) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    // Hive doesn't have the concept of not-null
+    return ResultSetMetaData.columnNullable;
   }
 
   /* (non-Javadoc)
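With the Thrift-to-java.sql.Types mapping in place, a client can walk a
result schema generically. A sketch, assuming getColumnCount() and
getColumnName() are also implemented (they are not part of this diff), with
an illustrative table name:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;

public class SchemaWalkDemo {
  public static void main(String[] args) throws Exception {
    Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
    Connection con = DriverManager.getConnection("jdbc:hive://", "", "");
    Statement stmt = con.createStatement();
    ResultSet res = stmt.executeQuery("select * from src limit 1"); // hypothetical table
    ResultSetMetaData meta = res.getMetaData();
    for (int i = 1; i <= meta.getColumnCount(); i++) {
      System.out.println(meta.getColumnName(i)
          + " " + meta.getColumnTypeName(i)         // e.g. "int", via serde Constants
          + " (java.sql.Types " + meta.getColumnType(i) + ")");
    }
    con.close();
  }
}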
Modified: hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java?rev=806917&r1=806916&r2=806917&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java (original)
+++ hadoop/hive/branches/branch-0.4/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java Sun Aug 23 01:45:24 2009
@@ -29,6 +29,30 @@
   JdbcSessionState session;
   HiveInterface client;
 
   /**
+   * We need to keep a reference to the result set to support the following:
+   *
+   * statement.execute(String sql);
+   * statement.getResultSet();
+   *
+   */
+  ResultSet resultSet = null;
+
+  /**
+   * The maximum number of rows this statement should return (0 => all rows)
+   */
+  int maxRows = 0;
+
+  /**
+   * Add SQLWarnings to the warningChain if needed
+   */
+  SQLWarning warningChain = null;
+
+  /**
+   * Keep state so we can fail certain calls made after close()
+   */
+  boolean isClosed = false;
+
+  /**
    *
    */
   public HiveStatement(JdbcSessionState session, HiveInterface client) {
@@ -68,8 +92,7 @@
    */
   public void clearWarnings() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    this.warningChain = null;
   }
 
   /* (non-Javadoc)
@@ -77,8 +100,10 @@
    */
   public void close() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    //TODO: how to properly shut down the client?
+    client = null;
+    resultSet = null;
+    isClosed = true;
   }
 
   /* (non-Javadoc)
@@ -86,7 +111,11 @@
    */
   public boolean execute(String sql) throws SQLException {
-    return true;
+    ResultSet rs = executeQuery(sql);
+
+    //TODO: this should really check if there are results, but there's no easy
+    //way to do that without calling rs.next();
+    return rs != null;
   }
 
   /* (non-Javadoc)
@@ -132,12 +161,17 @@
    */
   public ResultSet executeQuery(String sql) throws SQLException {
+    if (this.isClosed)
+      throw new SQLException("Can't execute after statement has been closed");
+
     try {
+      this.resultSet = null;
       client.execute(sql);
     } catch (Exception ex) {
       throw new SQLException(ex.toString());
     }
-    return new HiveResultSet(client);
+    this.resultSet = new HiveResultSet(client, maxRows);
+    return this.resultSet;
   }
 
   /* (non-Javadoc)
@@ -233,8 +267,7 @@
    */
   public int getMaxRows() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return this.maxRows;
   }
 
   /* (non-Javadoc)
@@ -269,8 +302,7 @@
    */
   public ResultSet getResultSet() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return this.resultSet;
   }
 
   /* (non-Javadoc)
@@ -305,8 +337,7 @@
    */
   public int getUpdateCount() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return 0;
   }
 
   /* (non-Javadoc)
@@ -314,8 +345,7 @@
    */
   public SQLWarning getWarnings() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return this.warningChain;
   }
 
   /* (non-Javadoc)
@@ -323,8 +353,7 @@
    */
   public boolean isClosed() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return this.isClosed;
   }
 
   /* (non-Javadoc)
@@ -386,8 +415,8 @@
    */
   public void setMaxRows(int max) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    if (max < 0) throw new SQLException("max must be >= 0");
+    this.maxRows = max;
   }
 
   /* (non-Javadoc)
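execute() now delegates to executeQuery() and the statement caches its
ResultSet, so the generic execute()/getResultSet() flow that tools like
SQuirreL SQL use works end to end. A sketch (table name illustrative):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class ExecuteDemo {
  public static void main(String[] args) throws Exception {
    Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
    Connection con = DriverManager.getConnection("jdbc:hive://", "", "");
    Statement stmt = con.createStatement();

    if (stmt.execute("select * from src")) { // hypothetical table
      ResultSet res = stmt.getResultSet();   // the same ResultSet execute() produced
      while (res.next()) {
        System.out.println(res.getString(1));
      }
    }

    stmt.close();
    con.close();
  }
}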
Modified: hadoop/hive/branches/branch-0.4/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=806917&r1=806916&r2=806917&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hadoop/hive/branches/branch-0.4/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Sun Aug 23 01:45:24 2009
@@ -20,18 +20,15 @@
 
 import java.sql.SQLException;
 import java.sql.Connection;
-import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.Statement;
-import java.sql.Driver;
 import java.sql.DriverManager;
-import java.util.Properties;
+import java.sql.DatabaseMetaData;
+import java.sql.DriverPropertyInfo;
+import java.sql.ResultSetMetaData;
+import java.sql.Types;
 
 import junit.framework.TestCase;
-import javax.naming.*;
-import javax.naming.directory.*;
-import javax.sql.DataSource;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 
 public class TestJdbcDriver extends TestCase {
@@ -62,6 +59,7 @@
       con = DriverManager.getConnection("jdbc:hive://", "", "");
     }
     assertNotNull("Connection is null", con);
+    assertFalse("Connection should not be closed", con.isClosed());
     Statement stmt = con.createStatement();
     assertNotNull("Statement is null", stmt);
@@ -110,33 +108,53 @@
     res = stmt.executeQuery("drop table " + partitionedTableName);
     assertFalse(res.next());
+
+    con.close();
+    assertTrue("Connection should be closed", con.isClosed());
+
+    Exception expectedException = null;
+    try {
+      con.createStatement();
+    }
+    catch(Exception e) {
+      expectedException = e;
+    }
+
+    assertNotNull("createStatement() on closed connection should throw exception",
+        expectedException);
   }
 
   public final void testSelectAll() throws Exception {
+    doTestSelectAll(this.tableName, -1); // tests not setting maxRows (return all)
+    doTestSelectAll(this.tableName, 0);  // tests setting maxRows to 0 (return all)
+  }
+
+  public final void testSelectAllPartitioned() throws Exception {
+    doTestSelectAll(this.partitionedTableName, -1); // tests not setting maxRows (return all)
+    doTestSelectAll(this.partitionedTableName, 0);  // tests setting maxRows to 0 (return all)
+  }
+
+  public final void testSelectAllMaxRows() throws Exception {
+    doTestSelectAll(this.tableName, 100);
+  }
+
+  private final void doTestSelectAll(String tableName, int maxRows) throws Exception {
     Statement stmt = con.createStatement();
-    assertNotNull("Statement is null", stmt);
+    if (maxRows >= 0) stmt.setMaxRows(maxRows);
 
-    ResultSet res;
+    //JDBC says that 0 means return all, which is the default
+    int expectedMaxRows = maxRows < 1 ? 0 : maxRows;
 
-    // TODO: There is no schema for show tables or describe table.
-    /*
-    stmt.executeQuery("drop table michi1");
-    stmt.executeQuery("drop table michi2");
-    stmt.executeQuery("drop table michi3");
-    stmt.executeQuery("create table michi1 (num int)");
-    stmt.executeQuery("create table michi2 (num int)");
-    stmt.executeQuery("create table michi3 (num int)");
+    assertNotNull("Statement is null", stmt);
+    assertEquals("Statement max rows not as expected", expectedMaxRows, stmt.getMaxRows());
+    assertFalse("Statement should not be closed", stmt.isClosed());
 
-    res = stmt.executeQuery("show tables");
-    res = stmt.executeQuery("describe michi1");
-    while (res.next()) {
-      System.out.println(res.getString(0));
-    }
-    */
+    ResultSet res;
 
     // run some queries
     res = stmt.executeQuery("select * from " + tableName);
     assertNotNull("ResultSet is null", res);
+    assertTrue("getResultSet() not returning expected ResultSet", res == stmt.getResultSet());
+    assertEquals("get update count not as expected", 0, stmt.getUpdateCount());
     int i = 0;
 
     boolean moreRow = res.next();
@@ -146,6 +164,10 @@
       res.getInt(1);
       res.getString(1);
      res.getString(2);
+      assertFalse("Last result value was not null", res.wasNull());
+      assertNull("No warnings should be found on ResultSet", res.getWarnings());
+      res.clearWarnings(); //verifying that method is supported
+
       //System.out.println(res.getString(1) + " " + res.getString(2));
       assertEquals("getInt and getString don't align for the same result value",
         String.valueOf(res.getInt(1)), res.getString(1));
@@ -159,47 +181,22 @@
         throw new Exception(e.toString());
       }
     }
-    // supposed to get 500 rows
-    assertEquals(500, i);
+
+    // supposed to get 500 rows if maxRows isn't set
+    int expectedRowCount = maxRows > 0 ? maxRows : 500;
+    assertEquals("Incorrect number of rows returned", expectedRowCount, i);
 
     // should have no more rows
     assertEquals(false, moreRow);
-  }
 
-  public final void testSelectAllPartitioned() throws Exception {
-    Statement stmt = con.createStatement();
-    assertNotNull("Statement is null", stmt);
+    assertNull("No warnings should be found on statement", stmt.getWarnings());
+    stmt.clearWarnings(); //verifying that method is supported
 
-    // run some queries
-    ResultSet res = stmt.executeQuery("select * from " + partitionedTableName);
-    assertNotNull("ResultSet is null", res);
-    int i = 0;
-
-    boolean moreRow = res.next();
-    while (moreRow) {
-      try {
-        i++;
-        res.getInt(1);
-        res.getString(1);
-        res.getString(2);
-        //System.out.println(res.getString(1) + " " + res.getString(2));
-        assertEquals("getInt and getString don't align for the same result value",
-          String.valueOf(res.getInt(1)), res.getString(1));
-        assertEquals("Unexpected result found",
-          "val_" + res.getString(1), res.getString(2));
-        moreRow = res.next();
-      }
-      catch (SQLException e) {
-        System.out.println(e.toString());
-        e.printStackTrace();
-        throw new Exception(e.toString());
-      }
-    }
-    // supposed to get 500 rows
-    assertEquals(500, i);
+    assertNull("No warnings should be found on connection", con.getWarnings());
+    con.clearWarnings(); //verifying that method is supported
 
-    // should have no more rows
-    assertEquals(false, moreRow);
+    stmt.close();
+    assertTrue("Statement should be closed", stmt.isClosed());
   }
 
   public void testShowTables() throws SQLException {
@@ -237,4 +234,80 @@
   }
 
+  public void testDatabaseMetaData() throws SQLException {
+    DatabaseMetaData meta = con.getMetaData();
+
+    assertEquals("Hive", meta.getDatabaseProductName());
+    assertEquals("0", meta.getDatabaseProductVersion());
+    assertNull(meta.getProcedures(null, null, null));
+    assertFalse(meta.supportsCatalogsInTableDefinitions());
+    assertFalse(meta.supportsSchemasInTableDefinitions());
+    assertFalse(meta.supportsSchemasInDataManipulation());
+    assertFalse(meta.supportsMultipleResultSets());
+    assertFalse(meta.supportsStoredProcedures());
+  }
+
+  public void testResultSetMetaData() throws SQLException {
+    Statement stmt = con.createStatement();
+    ResultSet res = stmt.executeQuery("drop table " + tableName);
+
+    //creating a table with tinyint is failing currently so not including
+    res = stmt.executeQuery("create table " + tableName + " (a string, b boolean, c bigint, d int, f double)");
+    res = stmt.executeQuery("select * from " + tableName + " limit 1");
+
+    ResultSetMetaData meta = res.getMetaData();
+    assertEquals("Unexpected column type", Types.VARCHAR, meta.getColumnType(1));
+    assertEquals("Unexpected column type", Types.BOOLEAN, meta.getColumnType(2));
+    assertEquals("Unexpected column type", Types.BIGINT, meta.getColumnType(3));
+    assertEquals("Unexpected column type", Types.INTEGER, meta.getColumnType(4));
+    assertEquals("Unexpected column type", Types.DOUBLE, meta.getColumnType(5));
+    assertEquals("Unexpected column type name", "string", meta.getColumnTypeName(1));
+    assertEquals("Unexpected column type name", "boolean", meta.getColumnTypeName(2));
+    assertEquals("Unexpected column type name", "bigint", meta.getColumnTypeName(3));
+    assertEquals("Unexpected column type name", "int", meta.getColumnTypeName(4));
+    assertEquals("Unexpected column type name", "double", meta.getColumnTypeName(5));
+    assertEquals("Unexpected column display size", 32, meta.getColumnDisplaySize(1));
+    assertEquals("Unexpected column display size", 8, meta.getColumnDisplaySize(2));
+    assertEquals("Unexpected column display size", 32, meta.getColumnDisplaySize(3));
+    assertEquals("Unexpected column display size", 16, meta.getColumnDisplaySize(4));
+    assertEquals("Unexpected column display size", 16, meta.getColumnDisplaySize(5));
+
+    for (int i = 1; i <= 5; i++) {
+      assertFalse(meta.isAutoIncrement(i));
+      assertFalse(meta.isCurrency(i));
+      assertEquals(ResultSetMetaData.columnNullable, meta.isNullable(i));
+
+      int expectedPrecision = i == 5 ? -1 : 0;
+      int expectedScale = i == 5 ? -1 : 0;
+      assertEquals("Unexpected precision", expectedPrecision, meta.getPrecision(i));
+      assertEquals("Unexpected scale", expectedScale, meta.getScale(i));
+    }
+  }
+
+  // [url] [host] [port] [db]
+  private static final String[][] URL_PROPERTIES = new String[][] {
+    {"jdbc:hive://", "", "", "default"},
+    {"jdbc:hive://localhost:10001/default", "localhost", "10001", "default"},
+    {"jdbc:hive://localhost/notdefault", "localhost", "10000", "notdefault"},
+    {"jdbc:hive://foo:1243", "foo", "1243", "default"}
+  };
+
+  public void testDriverProperties() throws SQLException {
+    HiveDriver driver = new HiveDriver();
+
+    for (String[] testValues : URL_PROPERTIES) {
+      DriverPropertyInfo[] dpi = driver.getPropertyInfo(testValues[0], null);
+      assertEquals("unexpected DriverPropertyInfo array size", 3, dpi.length);
+      assertDpi(dpi[0], "HOST", testValues[1]);
+      assertDpi(dpi[1], "PORT", testValues[2]);
+      assertDpi(dpi[2], "DBNAME", testValues[3]);
+    }
+  }
+
+  private static void assertDpi(DriverPropertyInfo dpi, String name, String value) {
+    assertEquals("Invalid DriverPropertyInfo name", name, dpi.name);
+    assertEquals("Invalid DriverPropertyInfo value", value, dpi.value);
+    assertEquals("Invalid DriverPropertyInfo required", false, dpi.required);
+  }
 }