hadoop-common-commits mailing list archives

From Apache Wiki <wikidi...@apache.org>
Subject [Hadoop Wiki] Update of "Hbase-jdo/example" by Misty
Date Mon, 02 Nov 2015 05:03:56 GMT
Dear Wiki user,

You have subscribed to a wiki page or wiki category on "Hadoop Wiki" for change notification.

The "Hbase-jdo/example" page has been changed by Misty:
https://wiki.apache.org/hadoop/Hbase-jdo/example?action=diff&rev1=4&rev2=5

+ The HBase Wiki is in the process of being decommissioned. The info that used to be on this page has moved to https://hbase.apache.org/book.html#jdo. Please update your bookmarks.
- ## page was renamed from ncanis/Hbase-jdo example for newbie
- = Some features =
  
-  * simple JDO (uses reflection)
-  * HTable pool (I know HBase already provides HTablePool)
-  * simple query classes (insert, delete, update, select)
-  * table sequence generator
- 
- = dependency library =
-  * HBase 0.90 libraries (http://hbase.apache.org)
- 
-  * commons-beanutils.jar (http://commons.apache.org/) 
-  * commons-pool-1.5.5.jar (http://commons.apache.org/) 
-  * transactional-tableindexed for 0.90 (https://github.com/hbase-trx/hbase-transactional-tableindexed)
-    
- = Download = 
-  * http://code.google.com/p/hbase-jdo/
- 
- = Examples =
- {{{#!java
- 
- package com.apache.hadoop.hbase.client.jdo.examples;
- 
- import java.io.File;
- import java.io.FileInputStream;
- import java.io.InputStream;
- import java.util.Hashtable;
- 
- import org.apache.hadoop.fs.Path;
- import org.apache.hadoop.hbase.client.tableindexed.IndexedTable;
- 
- import com.apache.hadoop.hbase.client.jdo.AbstractHBaseDBO;
- import com.apache.hadoop.hbase.client.jdo.HBaseBigFile;
- import com.apache.hadoop.hbase.client.jdo.HBaseDBOImpl;
- import com.apache.hadoop.hbase.client.jdo.query.DeleteQuery;
- import com.apache.hadoop.hbase.client.jdo.query.HBaseOrder;
- import com.apache.hadoop.hbase.client.jdo.query.HBaseParam;
- import com.apache.hadoop.hbase.client.jdo.query.InsertQuery;
- import com.apache.hadoop.hbase.client.jdo.query.QSearch;
- import com.apache.hadoop.hbase.client.jdo.query.SelectQuery;
- import com.apache.hadoop.hbase.client.jdo.query.UpdateQuery;
- 
- /**
-  * HBase JDO example.
-  * 
-  * Dependency libraries:
-  * - commons-beanutils.jar
-  * - commons-pool-1.5.5.jar
-  * - hbase0.90.0-transactional.jar
-  * 
-  * You can extend the Delete, Select, Update, and Insert query classes.
-  * @author ncanis
-  *
-  */
- public class HBaseExample {
-         public static void main(String[] args) throws Exception {
-                 AbstractHBaseDBO dbo = new HBaseDBOImpl();
-                 
-                 // drop the table if it already exists.
-                 if(dbo.isTableExist("user")){
-                         dbo.deleteTable("user");
-                 }
-                 
-                 // create the table.
-                 dbo.createTableIfNotExist("user",HBaseOrder.DESC,"account");
-                 //dbo.createTableIfNotExist("user",HBaseOrder.ASC,"account");
-                 
-                 //create index.
-                 String[] cols={"id","name"};
-                 dbo.addIndexExistingTable("user","account",cols);
-                 
-                 //insert
-                 InsertQuery insert = dbo.createInsertQuery("user");
-                 UserBean bean = new UserBean();
-                 bean.setFamily("account");
-                 bean.setAge(20);
-                 bean.setEmail("ncanis@gmail.com");
-                 bean.setId("ncanis");
-                 bean.setName("ncanis");
-                 bean.setPassword("1111");
-                 insert.insert(bean);
-                 
-                 //select 1 row
-                 SelectQuery select = dbo.createSelectQuery("user");
-                 UserBean resultBean = (UserBean)select.select(bean.getRow(),UserBean.class);
-                 
-                 // select column value.
-                 String value = (String)select.selectColumn(bean.getRow(),"account","id",String.class);
-                 
-                 // search with option (QSearch has EQUAL, NOT_EQUAL, LIKE)
-                 // select id,password,name,email from account where id='ncanis' limit startRow,20
-                 HBaseParam param = new HBaseParam();
-                 param.setPage(bean.getRow(),20);
-                 param.addColumn("id","password","name","email");
-                 param.addSearchOption("id","ncanis",QSearch.EQUAL);
-                 select.search("account", param, UserBean.class);
-                 
-                 // check whether the column value exists.
-                 boolean isExist = select.existColumnValue("account","id","ncanis".getBytes());
-                 
-                 // update password.
-                 UpdateQuery update = dbo.createUpdateQuery("user");
-                 Hashtable<String, byte[]> colsTable = new Hashtable<String, byte[]>();
-                 colsTable.put("password","2222".getBytes());
-                 update.update(bean.getRow(),"account",colsTable);
-                 
-                 //delete
-                 DeleteQuery delete = dbo.createDeleteQuery("user");
-                 delete.deleteRow(resultBean.getRow());
-         
-                 ////////////////////////////////////
-                 // etc
-                 
-                 // HTable pool backed by Apache Commons Pool.
-                 // borrow and release; HBasePoolManager(maxActive, minIdle, etc.)
-                 IndexedTable table = dbo.getPool().borrow("user");
-                 dbo.getPool().release(table);
-                 
-                 // upload a big file directly through Hadoop.
-                 HBaseBigFile bigFile = new HBaseBigFile();
-                 File file = new File("doc/movie.avi");
-                 FileInputStream fis = new FileInputStream(file);
-                 Path rootPath = new Path("/files/");
-                 String filename = "movie.avi";
-                 bigFile.uploadFile(rootPath,filename,fis,true);
-                 
-                 // read the file stream back from Hadoop.
-                 Path p = new Path(rootPath,filename);
-                 InputStream is = bigFile.path2Stream(p,4096);
-                 
-                 
-         }
- }
- }}}
- 
