Level 1: Batch Processing

package step1;

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {
    public Object[] batchOp(String tablename) throws Exception {
        /********* Begin *********/
        Configuration conf = HBaseConfiguration.create();
        Connection conn = ConnectionFactory.createConnection(conf);
        Table table = conn.getTable(TableName.valueOf(tablename));
        List<Row> rows = new ArrayList<>();
        // Delete operations
        Delete delete = new Delete(Bytes.toBytes("row1"));
        Delete delete2 = new Delete(Bytes.toBytes("row2"));
        rows.add(delete);
        rows.add(delete2);
        // Get operations
        Get get = new Get(Bytes.toBytes("row3"));
        Get get2 = new Get(Bytes.toBytes("row10"));
        rows.add(get);
        rows.add(get2);
        // One result slot per queued operation
        Object[] results = new Object[rows.size()];
        // batch() is synchronous: when it returns, each operation's outcome has
        // been placed into results. Delete and Put operations produce an empty
        // Result, i.e. no payload.
        table.batch(rows, results);
        return results;
        /********* End *********/
    }
}

Command line: start Hadoop and HBase before running (typically start-dfs.sh followed by start-hbase.sh).

Level 2: Scanning All Data in a Table

package step2;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;

public class Task {
    public void scanTable(String tablename) throws Exception {
        /********* Begin *********/
        Configuration conf = HBaseConfiguration.create();
        Connection conn = ConnectionFactory.createConnection(conf);
        Table table = conn.getTable(TableName.valueOf(tablename));
        Scan scan = new Scan();
        ResultScanner scanner = table.getScanner(scan);
        for (Result result : scanner) {
            for (Cell cell : result.listCells()) {
                System.out.println(new String(CellUtil.cloneValue(cell), "utf-8"));
            }
        }
        /********* End *********/
    }
}

Level 3: Using Caching and Batch Parameters in a Scan

package step3;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {
    public void scanTable(String tablename) throws Exception {
        /********* Begin *********/
        Configuration conf = HBaseConfiguration.create();
        Connection conn = ConnectionFactory.createConnection(conf);
        Table table = conn.getTable(TableName.valueOf(tablename));
        Scan scan = new Scan();
        scan.setCaching(200);                     // fetch up to 200 rows per RPC round trip
        scan.setStartRow(Bytes.toBytes("row3")); // start scanning from row3 (inclusive)
        scan.setStopRow(Bytes.toBytes("row199")); // stop before row199 (exclusive)
        ResultScanner scanner = table.getScanner(scan);
        for (Result result : scanner) {
            for (Cell cell : result.listCells()) {
                System.out.println(new String(CellUtil.cloneValue(cell), "utf-8"));
            }
        }
        /********* End *********/
    }
}
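
Follow-up note on Level 1: the Object[] returned by batchOp mixes result types, so it helps to see how to inspect it. Below is a minimal sketch, assuming the step1.Task class above is on the classpath and a table (here hypothetically named "mytable") contains the affected rows. A successful Get comes back as a Result with cells, a successful Delete as an empty Result, and a failed operation leaves null or a Throwable in its slot.

package step1;

import org.apache.hadoop.hbase.client.Result;

public class BatchResultDemo {
    public static void main(String[] args) throws Exception {
        // "mytable" is a placeholder; substitute the table used in the exercise.
        Object[] results = new Task().batchOp("mytable");
        for (int i = 0; i < results.length; i++) {
            if (results[i] instanceof Result) {
                Result r = (Result) results[i];
                // Deletes yield an empty Result; Gets yield the fetched cells.
                System.out.println(i + ": " + (r.isEmpty() ? "empty Result" : r));
            } else {
                // null or a Throwable means this individual operation failed.
                System.out.println(i + ": " + results[i]);
            }
        }
    }
}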
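
Follow-up note on Level 2: printing only the cell value loses the cell's coordinates. A purely illustrative drop-in variant of the inner loop (same scanner as above) that also decodes the row key, column family, and qualifier via the other CellUtil.clone* helpers:

for (Result result : scanner) {
    for (Cell cell : result.listCells()) {
        System.out.println(
                new String(CellUtil.cloneRow(cell), "utf-8") + " "
                + new String(CellUtil.cloneFamily(cell), "utf-8") + ":"
                + new String(CellUtil.cloneQualifier(cell), "utf-8") + " = "
                + new String(CellUtil.cloneValue(cell), "utf-8"));
    }
}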
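
Follow-up note on Level 3: the level title mentions both caching and batch parameters, but the answer above only sets caching. setCaching(n) controls how many rows each RPC round trip fetches, while Scan.setBatch(m) additionally caps how many cells of a single row one Result may carry, which matters for wide rows. A sketch with illustrative values, not the graded answer:

Scan scan = new Scan();
scan.setCaching(200); // up to 200 rows per RPC round trip
scan.setBatch(5);     // at most 5 cells of any single row per Result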