Integrating Hive from Java with Kerberos authentication: creating, dropping, and querying Hive databases, tables, and partitioned tables
1. Add the Hadoop and Hive dependencies to the pom file
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>2.7.3</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.7.3</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>2.7.3</version>
</dependency>
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>2.1.0</version>
</dependency>
2. From the cluster, download a keytab file that has already been authenticated and can log in, along with krb5.conf, core-site.xml, hdfs-site.xml, hive-site.xml, and yarn-site.xml, and place them in the project's resources directory (download only the xml files your project actually needs).
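Note: a plain new Configuration() only picks up core-default.xml and core-site.xml from the classpath by default. If the remaining cluster files should be applied explicitly before the Kerberos login in step 3, they can be registered by classpath name. A minimal sketch, assuming the files sit at the root of the classpath (the ClusterConfig helper class is hypothetical, not part of the original code):

import org.apache.hadoop.conf.Configuration;

public class ClusterConfig {
    /** Builds a Configuration with the downloaded cluster files registered explicitly. */
    public static Configuration build() {
        Configuration config = new Configuration();
        // core-site.xml is loaded automatically; register the others by classpath name.
        config.addResource("hdfs-site.xml");
        config.addResource("hive-site.xml");
        config.addResource("yarn-site.xml");
        return config;
    }
}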
3. Code implementation
package com.example.demo.hive;

import com.beust.jcommander.internal.Lists;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.io.IOException;
import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;

public class HiveUtil {

    private final static Logger logger = LogManager.getLogger(HiveUtil.class);
    private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM");
    private static Connection connection = null;

    public static void main(String[] args) {
        connection = getConnection();
        //createDatabase();   // create a database
        //dropDatabase();     // drop a database
        //createTable();      // create a table
        //dropTable();        // drop a table
        //insertTableData();  // insert data
        descTable();          // show the table structure
        //getTableData();     // query table data
        //loadData();         // load data
        /* List<LinkedHashMap<String, Object>> tables = querySql("show databases");
        for (LinkedHashMap<String, Object> table : tables) {
            String s = JSONObject.toJSONString(table);  // requires a JSON library such as fastjson
            System.out.println(s);
        } */
    }

    public static Connection getConnection() {
        System.setProperty("java.security.krb5.conf", "src/main/resources/krb5.conf");
        Configuration config = new Configuration();
        UserGroupInformation.setConfiguration(config);
        try {
            UserGroupInformation.loginUserFromKeytab("hive", "src/main/resources/hive.keytab");
            String driver = "org.apache.hive.jdbc.HiveDriver";
            String jdbc = "jdbc:hive2://192.168.1.110:10000/default;principal=hive/[email protected]";
            String user = "hive";
            String pass = "123456";
            try {
                Class.forName(driver);
            } catch (ClassNotFoundException e) {
                e.printStackTrace();
            }
            try {
                connection = DriverManager.getConnection(jdbc, user, pass);
                System.out.println("Connected successfully");
            } catch (SQLException e) {
                e.printStackTrace();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return connection;
    }
    public static List<LinkedHashMap<String, Object>> querySql(String sql) {
        // execute the sql
        Statement statement = null;
        ResultSet resultSet = null;
        try {
            statement = connection.createStatement();
            resultSet = statement.executeQuery(sql);
            return buildListMap(resultSet);
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            // close resources
            close(resultSet, statement);
        }
        return null;
    }

    /**
     * Converts a ResultSet into a List<Map>.
     **/
    public static List<LinkedHashMap<String, Object>> buildListMap(ResultSet resultSet) throws SQLException {
        if (resultSet == null) {
            return Lists.newArrayList();
        }
        List<LinkedHashMap<String, Object>> resultList = new ArrayList<>();
        // fetch the metadata
        ResultSetMetaData metaData = resultSet.getMetaData();
        while (resultSet.next()) {
            // number of columns
            int columnCount = metaData.getColumnCount();
            LinkedHashMap<String, Object> map = new LinkedHashMap<>();
            for (int i = 0; i < columnCount; i++) {
                String columnName = metaData.getColumnName(i + 1);
                // skip row-number columns in the result
                if ("mm.row_num_01".equalsIgnoreCase(columnName) || "row_num_01".equalsIgnoreCase(columnName)) {
                    continue;
                }
                // strip the "mm." alias prefix that Hive puts on result columns
                if (columnName.startsWith("mm.")) {
                    columnName = columnName.substring(columnName.indexOf(".") + 1);
                }
                // fetch by position, since the label may have been rewritten above
                Object object = resultSet.getObject(i + 1);
                map.put(columnName, object);
            }
            resultList.add(map);
        }
        return resultList;
    }
    /**
     * Create a Hive database.
     */
    public static void createDatabase() {
        // A location can be specified when creating the database:
        /* create database if not exists hive_user_location
           location "/user/hive/hive_location_db"
           with dbproperties ('createby'='linda'); */
        String sql = "create database if not exists test_db";
        System.out.println("Creating database, statement: " + sql);
        try (Statement statement = connection.createStatement()) {
            statement.execute(sql);
            System.out.println("Database created");
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
    /**
     * Create a table.
     */
    public static void createTable() {
        // To create a partitioned table instead:
        //String sql = "create table user_partition(deptno int, dname string) partitioned by (month string) row format delimited fields terminated by '\t'";
        String sql = "create table user_tb(id int, name string) row format delimited fields terminated by ','";
        logger.info("Creating table, statement: {}", sql);
        try (Statement statement = connection.createStatement()) {
            statement.execute(sql);
            logger.info("Table created");
        } catch (SQLException e) {
            logger.error("Failed to create table", e);
        }
    }
    /**
     * Show the table structure.
     */
    public static void descTable() {
        /*
         * To list the existing partitions of a table:
         * show partitions <table name>;
         */
        String sql = "desc formatted user_partition"; // for the plain table: desc formatted user_tb;
        System.out.println("Describing table, statement: " + sql);
        try (Statement statement = connection.createStatement();
             ResultSet rs = statement.executeQuery(sql)) {
            while (rs.next()) {
                logger.info("Column: {}, type: {}", rs.getString(1), rs.getString(2));
            }
        } catch (SQLException e) {
            logger.error("Failed to describe table", e);
        }
    }
    /**
     * Query table data.
     */
    public static void getTableData() {
        Date date = new Date();
        String month = sdf.format(date);
        // To query the partitioned table:
        //String sql = "select * from user_partition where month='" + month + "'";
        String sql = "select * from user_tb where id=1";
        System.out.println("Querying table, statement: " + sql);
        try (Statement statement = connection.createStatement();
             ResultSet rs = statement.executeQuery(sql)) {
            while (rs.next()) {
                logger.info("id: {}, name: {}", rs.getString(1), rs.getString(2));
            }
        } catch (SQLException e) {
            logger.error("Failed to query table", e);
        }
    }
    /**
     * Drop a table.
     */
    public static void dropTable() {
        String sql = "drop table if exists user_tb";
        logger.info("Dropping table, statement: {}", sql);
        try (Statement statement = connection.createStatement()) {
            statement.execute(sql);
            logger.info("Table dropped");
        } catch (SQLException e) {
            logger.error("Failed to drop table", e);
        }
    }
    /**
     * Drop a database.
     */
    public static void dropDatabase() {
        String sql = "drop database if exists test_db";
        logger.info("Dropping database, statement: {}", sql);
        try (Statement statement = connection.createStatement()) {
            statement.execute(sql);
            logger.info("Database dropped");
        } catch (SQLException e) {
            logger.error("Failed to drop database", e);
        }
    }
    /**
     * Insert data.
     */
    public static void insertTableData() {
        Date date = new Date();
        String month = sdf.format(date);
        // To insert into the partitioned table:
        //String sql = "insert into table user_partition partition(month='" + month + "') values (1,'partition data')";
        String sql = "insert into table user_tb values (1,'Zhang San')";
        logger.info("Inserting data, statement: {}", sql);
        try (Statement statement = connection.createStatement()) {
            statement.execute(sql);
            logger.info("Data inserted");
        } catch (SQLException e) {
            logger.error("Failed to insert data", e);
        }
    }
    /**
     * Load data. data.txt has the format:<br>
     * 1,Zhang San<br>
     * 2,Li Si
     */
    public static void loadData() {
        String sql = "load data local inpath '/home/data.txt' overwrite into table user_tb";
        logger.info("Loading data, statement: {}", sql);
        try (Statement statement = connection.createStatement()) {
            statement.execute(sql);
            logger.info("Data loaded");
        } catch (SQLException e) {
            logger.error("Failed to load data", e);
        }
    }
    /**
     * Close resources. When passing several objects, order matters:
     * whatever must be closed first should come first in the arguments.
     *
     * @param objs varargs of closable JDBC objects
     */
    public static void close(Object... objs) {
        if (objs == null || objs.length == 0) {
            return;
        }
        for (Object obj : objs) {
            if (obj instanceof Statement) {
                try {
                    ((Statement) obj).close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            if (obj instanceof ResultSet) {
                try {
                    ((ResultSet) obj).close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            if (obj instanceof Connection) {
                try {
                    ((Connection) obj).close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
}
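For reference, a hypothetical caller might exercise the utility like this. The demo class, table name, and printed data are assumptions based on the examples above, and the class is assumed to live in the same package as HiveUtil:

package com.example.demo.hive;

import java.sql.Connection;
import java.util.LinkedHashMap;
import java.util.List;

public class HiveUtilDemo {
    public static void main(String[] args) {
        // Open the Kerberos-authenticated connection, run a query through
        // querySql, and print each row as an ordered column-name -> value map.
        Connection conn = HiveUtil.getConnection();
        List<LinkedHashMap<String, Object>> rows = HiveUtil.querySql("select * from user_tb");
        if (rows != null) {
            for (LinkedHashMap<String, Object> row : rows) {
                System.out.println(row); // prints e.g. {id=1, name=Zhang San}
            }
        }
        HiveUtil.close(conn);
    }
}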