hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From Apache Wiki <wikidi...@apache.org>
Subject [Hadoop Wiki] Trivial Update of "Hive/HiveClient" by RaghothamMurthy
Date Wed, 03 Jun 2009 05:42:12 GMT
Dear Wiki user,

You have subscribed to a wiki page or wiki category on "Hadoop Wiki" for change notification.

The following page has been changed by RaghothamMurthy:
http://wiki.apache.org/hadoop/Hive/HiveClient

------------------------------------------------------------------------------
  For embedded mode, uri is just "jdbc:hive://". For standalone server, uri is "jdbc:hive://host:port/dbname"
where host and port are determined by where the hive server is run. For example, "jdbc:hive://localhost:10000/default".
Currently, the only dbname supported is "default".
  
  {{{
+ import java.sql.SQLException;
+ import java.sql.Connection;
+ import java.sql.ResultSet;
+ import java.sql.Statement;
+ import java.sql.DriverManager;
  
- import java.sql.*;
public class HiveJdbcClient {
  /** Fully-qualified class name of the Hive JDBC driver. */
  private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";

  /**
   * Demo client for the Hive standalone server. Connects to
   * jdbc:hive://localhost:10000/default, (re)creates a test table, loads a
   * local ctrl-A-separated file into it, and runs a few queries, printing
   * results to stdout.
   *
   * @param args unused
   * @throws SQLException if any JDBC call against the Hive server fails
   */
  public static void main(String[] args) throws SQLException {
    try {
      Class.forName(driverName);
    } catch (ClassNotFoundException e) {
      // The Hive JDBC jar is not on the classpath; nothing else can work.
      e.printStackTrace();
      System.exit(1);
    }
    // Hive does not support username/password yet, so both are empty.
    Connection con = DriverManager.getConnection(
        "jdbc:hive://localhost:10000/default", "", "");
    try {
      Statement stmt = con.createStatement();
      String tableName = "testHiveDriverTable";
      // Drop any leftover table from a previous run, then recreate it.
      // NOTE: the current Hive driver routes every statement, including
      // DDL/DML, through executeQuery().
      stmt.executeQuery("drop table " + tableName);
      ResultSet res = stmt.executeQuery(
          "create table " + tableName + " (key int, value string)");

      // show tables
      String sql = "show tables '" + tableName + "'";
      System.out.println("Running: " + sql);
      res = stmt.executeQuery(sql);
      if (res.next()) {
        System.out.println(res.getString(1));
      }

      // describe table: one row per column, as (name, type) pairs.
      sql = "describe " + tableName;
      System.out.println("Running: " + sql);
      res = stmt.executeQuery(sql);
      while (res.next()) {
        System.out.println(res.getString(1) + "\t" + res.getString(2));
      }

      // load data into table
      // NOTE: filepath has to be local to the hive server
      // NOTE: /tmp/a.txt is a ctrl-A separated file with two fields per line
      String filepath = "/tmp/a.txt";
      sql = "load data local inpath '" + filepath + "' into table " + tableName;
      System.out.println("Running: " + sql);
      res = stmt.executeQuery(sql);

      // select * query: columns are (int key, string value) per the DDL above.
      sql = "select * from " + tableName;
      System.out.println("Running: " + sql);
      res = stmt.executeQuery(sql);
      while (res.next()) {
        System.out.println(String.valueOf(res.getInt(1)) + "\t" + res.getString(2));
      }

      // regular hive query (runs a map-reduce job on the server side)
      sql = "select count(1) from " + tableName;
      System.out.println("Running: " + sql);
      res = stmt.executeQuery(sql);
      while (res.next()) {
        System.out.println(res.getString(1));
      }
    } finally {
      // Always release the server connection, even if a query failed.
      con.close();
    }
  }
}
  
+ # Then on the command-line
+ $ javac HiveJdbcClient.java
- class HiveJdbcClient {
- public static void main(String[] args) {
-   String uri = "jdbc:hive://"; // for embedded mode
-   String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
-   // load the hive jdbc driver
-   Class.forName(driverName);
-   // currently do not support username and password
-   Connection con = DriverManager.getConnection(uri, ""/*user*/, ""/*passwd*/); 
-   Statement stmt = con.createStatement();
  
-   // DDL Statement
-   stmt.executeQuery("CREATE TABLE r(a STRING, b INT, c DOUBLE)");
-   stmt.executeQuery("CREATE TABLE s(a STRING, b INT, c DOUBLE)");
+ # To run the program, we need the following jars in the classpath
+ # from hive/build/dist/lib
+ #     commons-logging-*.jar
+ #     hive_jdbc.jar
+ #     hive_metastore.jar
+ #     hive_serde.jar
+ #     hive_service.jar
+ #     libfb303.jar
+ #     libthrift.jar
+ #
+ # from hadoop/build
+ #     hadoop-*-core.jar
  
+ $ java -cp $CLASSPATH HiveJdbcClient
-   // Metadata 
-   ResultSet res = stmt.executeQuery("SHOW TABLES");
-   // fetch results
-   while (res.next()) {
-    // Column indexes start from 1
-    System.out.println("Table name: " + res.getString(1));
-   }
  
-   // Metadata 
-   ResultSet res = stmt.executeQuery("DESCRIBE r");
-   while (res.next()) {
-    System.out.println("Column Name: " + res.getString(1));
-    System.out.println("Column Type: " + res.getString(2));
-    System.out.println("Comment: " + res.getString(3));
-   }
- 
-   // DML Statement
-   stmt.executeQuery("LOAD TABLE LOCAL INPATH '/path/to/file' INTO TABLE r");
- 
-   // Select
-   res = stmt.executeQuery("SELECT * FROM r");
-   while (res.next()) {
-    System.out.println(res.getString(1));
-    System.out.println(res.getInt(2));
-    System.out.println(res.getDouble(3));
-   }
- 
-   // Insert
-   res = stmt.executeQuery("INSERT OVERWRITE TABLE s SELECT * FROM r WHERE b > 10");
- 
-  }
- }
  }}}
  
  = Python =

Mime
View raw message