After creating the project, add the following dependencies to pom.xml:
<dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase-server</artifactId>
    <version>2.0.5</version>
</dependency>
<dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase-client</artifactId>
    <version>2.0.5</version>
</dependency>
Create the HBase_DDL class. Each of the following snippets shows one method of this class, with the shared imports repeated for readability.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceExistException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;

public class HBase_DDL {
    //TODO Check whether a table exists
    public static boolean isTableExist(String tableName) throws IOException {
        //1. Create and populate the configuration
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
        //2. Get a connection to HBase
        Connection connection = ConnectionFactory.createConnection(configuration);
        //3. Get the DDL operation object
        Admin admin = connection.getAdmin();
        //4. Check whether the table exists
        boolean exists = admin.tableExists(TableName.valueOf(tableName));
        //5. Close the connection
        admin.close();
        connection.close();
        //6. Return the result
        return exists;
    }
}
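Each snippet here opens and closes its own Connection for clarity. In a real application the Connection is heavyweight and should be created once and reused; a minimal sketch of such a helper is shown below (the class name HBaseConnectionHolder and its methods are assumptions for illustration, not part of the original code):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import java.io.IOException;

// Hypothetical helper: keeps one shared Connection for the whole application.
public class HBaseConnectionHolder {
    private static Connection connection;

    // Lazily create and cache a single Connection (Connection is thread-safe).
    public static synchronized Connection getConnection() throws IOException {
        if (connection == null || connection.isClosed()) {
            Configuration configuration = HBaseConfiguration.create();
            configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
            connection = ConnectionFactory.createConnection(configuration);
        }
        return connection;
    }

    // Close the shared Connection when the application shuts down.
    public static synchronized void close() throws IOException {
        if (connection != null) {
            connection.close();
        }
    }
}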
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceExistException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;

public class HBase_DDL {
    //TODO Create a table
    public static void createTable(String tableName, String... cfs) throws IOException {
        //1. Check that column family information was supplied
        if (cfs.length <= 0) {
            System.out.println("Please specify column family information!");
            return;
        }
        //2. Check whether the table already exists
        if (isTableExist(tableName)) {
            System.out.println("The table to be created already exists!");
            return;
        }
        //3. Create and populate the configuration
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
        //4. Get a connection to HBase
        Connection connection = ConnectionFactory.createConnection(configuration);
        //5. Get the DDL operation object
        Admin admin = connection.getAdmin();
        //6. Create a table descriptor builder
        TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName));
        //7. Loop over the column families and add each one
        for (String cf : cfs) {
            ColumnFamilyDescriptorBuilder columnFamilyDescriptorBuilder = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(cf));
            tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptorBuilder.build());
        }
        //8. Create the table
        admin.createTable(tableDescriptorBuilder.build());
        //9. Close resources
        admin.close();
        connection.close();
    }
}
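For reference, a possible caller might look like the sketch below; the table name "student" and the column families "info" and "score" are made-up example values, not part of the original code.

import java.io.IOException;

public class HBase_DDL_Test {
    public static void main(String[] args) throws IOException {
        // Create a table "student" with two column families (example names).
        HBase_DDL.createTable("student", "info", "score");
        // Confirm that it now exists.
        System.out.println("student exists: " + HBase_DDL.isTableExist("student"));
    }
}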
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceExistException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;

public class HBase_DDL {
    //TODO Drop a table
    public static void dropTable(String tableName) throws IOException {
        //1. Check whether the table exists
        if (!isTableExist(tableName)) {
            System.out.println("The table to be dropped does not exist!");
            return;
        }
        //2. Create and populate the configuration
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
        //3. Get a connection to HBase
        Connection connection = ConnectionFactory.createConnection(configuration);
        //4. Get the DDL operation object
        Admin admin = connection.getAdmin();
        //5. Take the table offline (disable it)
        TableName name = TableName.valueOf(tableName);
        admin.disableTable(name);
        //6. Delete the table
        admin.deleteTable(name);
        //7. Close resources
        admin.close();
        connection.close();
    }
}
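Note that disableTable throws an exception if the table is already disabled. A slightly more defensive variant is sketched below (an assumption for illustration, not part of the original code); it checks the table state first and uses try-with-resources to close the Admin and Connection:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import java.io.IOException;

public class HBase_DropTable_Demo {
    // Defensive drop: only disable the table if it is currently enabled.
    public static void dropTableSafely(String tableName) throws IOException {
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
        try (Connection connection = ConnectionFactory.createConnection(configuration);
             Admin admin = connection.getAdmin()) {
            TableName name = TableName.valueOf(tableName);
            if (!admin.tableExists(name)) {
                return;
            }
            if (admin.isTableEnabled(name)) {
                admin.disableTable(name);
            }
            admin.deleteTable(name);
        }
    }
}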
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceExistException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;

public class HBase_DDL {
    //TODO Create a namespace
    public static void createNameSpace(String ns) throws IOException {
        //1. Create and populate the configuration
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
        //2. Get a connection to HBase
        Connection connection = ConnectionFactory.createConnection(configuration);
        //3. Get the DDL operation object
        Admin admin = connection.getAdmin();
        //4. Build a namespace descriptor
        NamespaceDescriptor namespaceDescriptor = NamespaceDescriptor.create(ns).build();
        //5. Create the namespace
        try {
            admin.createNamespace(namespaceDescriptor);
        } catch (NamespaceExistException e) {
            System.out.println("The namespace already exists!");
        } catch (Exception e) {
            e.printStackTrace();
        }
        //6. Close the connection
        admin.close();
        connection.close();
    }
}
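Tables are placed into a namespace by using the "namespace:table" naming convention. A quick usage sketch follows; the namespace and table names here are made-up examples.

import java.io.IOException;

public class HBase_Namespace_Test {
    public static void main(String[] args) throws IOException {
        // Create the namespace, then create a table inside it via the "namespace:table" form.
        HBase_DDL.createNameSpace("bigdata");
        HBase_DDL.createTable("bigdata:student", "info");
    }
}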
Create the HBase_DML class. As before, each snippet shows one method of this class with the shared imports repeated.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;

public class HBase_DML {
    //TODO Insert data
    public static void putData(String tableName, String rowKey, String cf, String cn, String value) throws IOException {
        //1. Create the configuration and set the connection parameters
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
        //2. Get a connection
        Connection connection = ConnectionFactory.createConnection(configuration);
        //3. Get a handle to the table
        Table table = connection.getTable(TableName.valueOf(tableName));
        //4. Create a Put object
        Put put = new Put(Bytes.toBytes(rowKey));
        //5. Add the cell data to the Put
        put.addColumn(Bytes.toBytes(cf), Bytes.toBytes(cn), Bytes.toBytes(value));
        //6. Execute the insert
        table.put(put);
        //7. Close resources
        table.close();
        connection.close();
    }
}
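table.put also accepts a List<Put>, which writes many rows in a single call. A minimal batch-insert sketch follows; the table name, row keys, and values are example assumptions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class HBase_BatchPut_Demo {
    public static void main(String[] args) throws IOException {
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
        try (Connection connection = ConnectionFactory.createConnection(configuration);
             Table table = connection.getTable(TableName.valueOf("student"))) {
            // Build several Puts and send them in one round trip.
            List<Put> puts = new ArrayList<>();
            for (int i = 1; i <= 3; i++) {
                Put put = new Put(Bytes.toBytes("100" + i));
                put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("student" + i));
                puts.add(put);
            }
            table.put(puts);
        }
    }
}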
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;

public class HBase_DML {
    //TODO Single-row query (Get)
    public static void getData(String tableName, String rowKey, String cf, String cn) throws IOException {
        //1. Create the configuration and set the connection parameters
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
        //2. Get a connection
        Connection connection = ConnectionFactory.createConnection(configuration);
        //3. Get a handle to the table
        Table table = connection.getTable(TableName.valueOf(tableName));
        //4. Create a Get object
        Get get = new Get(Bytes.toBytes(rowKey));
        // Restrict the query to one column family
        // get.addFamily(Bytes.toBytes(cf));
        // Restrict the query to one family:qualifier column
        // get.addColumn(Bytes.toBytes(cf), Bytes.toBytes(cn));
        //5. Query the data
        Result result = table.get(get);
        //6. Parse the result (use CellUtil to copy out each component rather than printing the raw backing arrays)
        for (Cell cell : result.rawCells()) {
            System.out.println("CF:" + Bytes.toString(CellUtil.cloneFamily(cell)) +
                    ",CN:" + Bytes.toString(CellUtil.cloneQualifier(cell)) +
                    ",Value:" + Bytes.toString(CellUtil.cloneValue(cell)));
        }
        //7. Close resources
        table.close();
        connection.close();
    }
}
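When only one specific cell is needed, Result.getValue(family, qualifier) avoids iterating over rawCells(). A self-contained sketch is shown below; the table name, row key, and column names are example assumptions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;

public class HBase_GetValue_Demo {
    public static void main(String[] args) throws IOException {
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
        try (Connection connection = ConnectionFactory.createConnection(configuration);
             Table table = connection.getTable(TableName.valueOf("student"))) {
            // Read exactly one cell: row "1001", column info:name (example values).
            Result result = table.get(new Get(Bytes.toBytes("1001")));
            byte[] value = result.getValue(Bytes.toBytes("info"), Bytes.toBytes("name"));
            System.out.println(value == null ? "(no value)" : Bytes.toString(value));
        }
    }
}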
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;

public class HBase_DML {
    //TODO Scan data (Scan)
    public static void scanTable(String tableName) throws IOException {
        //1. Create the configuration and set the connection parameters
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
        //2. Get a connection
        Connection connection = ConnectionFactory.createConnection(configuration);
        //3. Get a handle to the table
        Table table = connection.getTable(TableName.valueOf(tableName));
        //4. Create a Scan object (no range set, so the whole table is scanned)
        Scan scan = new Scan();
        //5. Scan the data
        ResultScanner results = table.getScanner(scan);
        //6. Parse the results (again via CellUtil)
        for (Result result : results) {
            for (Cell cell : result.rawCells()) {
                System.out.println("CF:" + Bytes.toString(CellUtil.cloneFamily(cell)) +
                        ",CN:" + Bytes.toString(CellUtil.cloneQualifier(cell)) +
                        ",Value:" + Bytes.toString(CellUtil.cloneValue(cell)));
            }
        }
        //7. Close resources
        table.close();
        connection.close();
    }
}
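A full-table scan is expensive on large tables; the Scan can be limited to a row-key range with withStartRow/withStopRow. A sketch under example assumptions (table name and row keys are made up):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;

public class HBase_ScanRange_Demo {
    public static void main(String[] args) throws IOException {
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
        try (Connection connection = ConnectionFactory.createConnection(configuration);
             Table table = connection.getTable(TableName.valueOf("student"))) {
            // Restrict the scan to a row-key range; the stop row is exclusive by default.
            Scan scan = new Scan()
                    .withStartRow(Bytes.toBytes("1001"))
                    .withStopRow(Bytes.toBytes("1003"));
            try (ResultScanner results = table.getScanner(scan)) {
                for (Result result : results) {
                    for (Cell cell : result.rawCells()) {
                        System.out.println("RK:" + Bytes.toString(CellUtil.cloneRow(cell)) +
                                ",CF:" + Bytes.toString(CellUtil.cloneFamily(cell)) +
                                ",CN:" + Bytes.toString(CellUtil.cloneQualifier(cell)) +
                                ",Value:" + Bytes.toString(CellUtil.cloneValue(cell)));
                    }
                }
            }
        }
    }
}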
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;

public class HBase_DML {
    //TODO Delete data
    public static void deleteData(String tableName, String rowKey, String cf, String cn) throws IOException {
        //1. Create the configuration and set the connection parameters
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "hadoop102,hadoop103,hadoop104");
        //2. Get a connection
        Connection connection = ConnectionFactory.createConnection(configuration);
        //3. Get a handle to the table
        Table table = connection.getTable(TableName.valueOf(tableName));
        //4. Create a Delete object (with only the row key set, the whole row is deleted)
        Delete delete = new Delete(Bytes.toBytes(rowKey));
        // Delete an entire column family
        // delete.addFamily(Bytes.toBytes(cf));
        // Delete the latest version of the specified family:qualifier column
        // delete.addColumn(Bytes.toBytes(cf), Bytes.toBytes(cn));
        // Delete all versions of the specified family:qualifier column
        // delete.addColumns(Bytes.toBytes(cf), Bytes.toBytes(cn));
        //5. Execute the delete
        table.delete(delete);
        //6. Close resources
        table.close();
        connection.close();
    }
}
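To tie the DML snippets together, a small end-to-end driver might call them in sequence, as in the sketch below; all table, row, and value names here are example assumptions.

import java.io.IOException;

public class HBase_DML_Test {
    public static void main(String[] args) throws IOException {
        // Insert one cell, read it back, scan the table, then delete the row.
        HBase_DML.putData("student", "1001", "info", "name", "zhangsan");
        HBase_DML.getData("student", "1001", "info", "name");
        HBase_DML.scanTable("student");
        HBase_DML.deleteData("student", "1001", "info", "name");
    }
}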