1. Create a Maven project. (In IDEA you don't have to start from a Maven archetype: create a Java project, then right-click the project -> Add Framework Support -> check Maven.)
2. Add the following to pom.xml:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>HDFS_HBase_HiveApi</groupId>
    <artifactId>HDFS_HBase_HiveApi</artifactId>
    <version>1.0-SNAPSHOT</version>

    <repositories><!-- repositories -->
        <repository>
            <id>aliyun</id>
            <url>http://maven.aliyun.com/nexus/content/groups/public/</url>
            <releases>
                <enabled>true</enabled>
            </releases>
            <snapshots>
                <enabled>false</enabled>
                <updatePolicy>never</updatePolicy>
            </snapshots>
        </repository>
    </repositories>

    <dependencies>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>2.1.0</version>
        </dependency>
        <dependency>
            <groupId>commons-io</groupId>
            <artifactId>commons-io</artifactId>
            <version>2.6</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.12</version>
        </dependency>
        <dependency>
            <groupId>org.testng</groupId>
            <artifactId>testng</artifactId>
            <version>7.4.0</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.7.5</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.7.5</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>2.7.5</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-core</artifactId>
            <version>2.7.5</version>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-api</artifactId>
            <version>RELEASE</version>
            <scope>compile</scope>
        </dependency>
    </dependencies>
</project>
(The methods below require the HBase cluster services to be running.) Start HBase: since HBase in pseudo-distributed mode depends on HDFS, start HDFS first:
start-dfs.sh
Then start HBase:
start-hbase.sh
3. Now the code:
package com.company.HDFS;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import java.io.IOException;
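// Extra imports used only by the additional sketch methods below (not part of the original example).
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import java.util.ArrayList;
import java.util.List;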
/**
 * HBase Java API demo.
 */
public class HbaseDemo1 {

    /**
     * createTable(): create a table named "person" with one column family "info".
     */
    @Test
    public void createTable() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node1:2181");
        // open a connection
        Connection conn = ConnectionFactory.createConnection(conf);
        // get the admin client
        Admin admin = conn.getAdmin();
        // define the table
        HTableDescriptor hTableDescriptor = new HTableDescriptor(TableName.valueOf("person"));
        // define the column family
        HColumnDescriptor hColumnDescriptor = new HColumnDescriptor("info");
        // add the column family to the table definition
        hTableDescriptor.addFamily(hColumnDescriptor);
        // create the table
        admin.createTable(hTableDescriptor);
        admin.close();
        conn.close();
    }
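
    /**
     * Sketch (not in the original post): check whether the "person" table already
     * exists before creating it, using Admin.tableExists().
     */
    @Test
    public void tableExists() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node1:2181");
        Connection conn = ConnectionFactory.createConnection(conf);
        Admin admin = conn.getAdmin();
        // returns true if the table has been created
        boolean exists = admin.tableExists(TableName.valueOf("person"));
        System.out.println("person exists: " + exists);
        admin.close();
        conn.close();
    }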
    /**
     * put(): insert a cell into HBase.
     */
    @Test
    public void put() {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node1:2181");
        try {
            // open a connection
            Connection conn = ConnectionFactory.createConnection(conf);
            // get the table
            Table table = conn.getTable(TableName.valueOf("person"));
            // instantiate Put with the row key
            Put put = new Put("rk001".getBytes());
            // specify the column family, column qualifier and value
            put.addColumn("info".getBytes(), "name".getBytes(), "zhangsan".getBytes());
            table.put(put);
            table.close();
            conn.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
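
    /**
     * Sketch (not in the original post): insert several rows in one round trip by
     * passing a List<Put> to Table.put(). The row keys rk002/rk003 and values are
     * made up for the example.
     */
    @Test
    public void putBatch() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node1:2181");
        Connection conn = ConnectionFactory.createConnection(conf);
        Table table = conn.getTable(TableName.valueOf("person"));
        List<Put> puts = new ArrayList<>();
        puts.add(new Put("rk002".getBytes())
                .addColumn("info".getBytes(), "name".getBytes(), "lisi".getBytes()));
        puts.add(new Put("rk003".getBytes())
                .addColumn("info".getBytes(), "name".getBytes(), "wangwu".getBytes()));
        // Table.put(List<Put>) sends the whole batch in one call
        table.put(puts);
        table.close();
        conn.close();
    }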
    /**
     * get(): read a single cell from HBase by row key.
     */
    @Test
    public void get() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node1:2181");
        // open a connection
        Connection conn = ConnectionFactory.createConnection(conf);
        // get the table
        Table table = conn.getTable(TableName.valueOf("person"));
        // instantiate Get with the row key
        Get get = new Get("rk001".getBytes());
        // restrict the read to one column family and qualifier
        get.addColumn("info".getBytes(), "name".getBytes());
        // execute and fetch the result
        Result result = table.get(get);
        // extract the value
        String valString = Bytes.toString(result.getValue("info".getBytes(), "name".getBytes()));
        System.out.println(valString);
        // close
        table.close();
        conn.close();
    }
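
    /**
     * Sketch (not in the original post): read a whole row and iterate over every cell
     * with Result.rawCells() and CellUtil, instead of asking for one known column.
     */
    @Test
    public void getWholeRow() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node1:2181");
        Connection conn = ConnectionFactory.createConnection(conf);
        Table table = conn.getTable(TableName.valueOf("person"));
        // no addColumn(): the Get returns every column of the row
        Result result = table.get(new Get("rk001".getBytes()));
        for (Cell cell : result.rawCells()) {
            System.out.println(Bytes.toString(CellUtil.cloneQualifier(cell))
                    + " = " + Bytes.toString(CellUtil.cloneValue(cell)));
        }
        table.close();
        conn.close();
    }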
    /**
     * scan(): scan the whole table.
     */
    @Test
    public void scan() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node1:2181");
        // open a connection
        Connection conn = ConnectionFactory.createConnection(conf);
        // get the table
        Table table = conn.getTable(TableName.valueOf("person"));
        // initialize the Scan
        Scan scan = new Scan();
        // restrict the scan to one column
        scan.addColumn("info".getBytes(), "name".getBytes());
        // execute and get a scanner over the results
        ResultScanner rss = table.getScanner(scan);
        // iterate over the results
        for (Result rs : rss) {
            String valStr = Bytes.toString(rs.getValue("info".getBytes(), "name".getBytes()));
            System.out.println(valStr);
        }
        // close
        table.close();
        conn.close();
    }
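
    /**
     * Sketch (not in the original post): limit a scan to a row-key range with
     * Scan.withStartRow()/withStopRow() (start inclusive, stop exclusive).
     * The keys rk001/rk999 are just illustrative bounds.
     */
    @Test
    public void scanRange() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node1:2181");
        Connection conn = ConnectionFactory.createConnection(conf);
        Table table = conn.getTable(TableName.valueOf("person"));
        Scan scan = new Scan()
                .withStartRow("rk001".getBytes())
                .withStopRow("rk999".getBytes());
        ResultScanner rss = table.getScanner(scan);
        for (Result rs : rss) {
            System.out.println(Bytes.toString(rs.getRow()));
        }
        rss.close();
        table.close();
        conn.close();
    }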
    /**
     * delete(): delete a row from the table.
     */
    @Test
    public void delete() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node1:2181");
        // open a connection
        Connection conn = ConnectionFactory.createConnection(conf);
        // get the table
        Table table = conn.getTable(TableName.valueOf("person"));
        // instantiate Delete with the row key (the row inserted above is "rk001")
        Delete del = new Delete("rk001".getBytes());
        // execute the delete
        table.delete(del);
        // close
        table.close();
        conn.close();
    }
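
    /**
     * Sketch (not in the original post): drop the whole table. A table must be
     * disabled with Admin.disableTable() before Admin.deleteTable() will accept it.
     */
    @Test
    public void dropTable() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node1:2181");
        Connection conn = ConnectionFactory.createConnection(conf);
        Admin admin = conn.getAdmin();
        TableName person = TableName.valueOf("person");
        if (admin.tableExists(person)) {
            admin.disableTable(person);   // required before deletion
            admin.deleteTable(person);
        }
        admin.close();
        conn.close();
    }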
}
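Note: HTableDescriptor and HColumnDescriptor, used in createTable() above, are deprecated in HBase 2.x. As a rough sketch (my addition, not from the original post), the same "person"/"info" table can be created with the builder API that replaces them. The fragment below is meant to be dropped into a test method like the ones above, with the same imports:

        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node1:2181");
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
            // build the table descriptor with the non-deprecated builder classes
            TableDescriptor desc = TableDescriptorBuilder.newBuilder(TableName.valueOf("person"))
                    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("info"))
                    .build();
            admin.createTable(desc);
        }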