0


关于Java连接Hive,Spark等服务的Kerberos工具类封装

关于Java连接Hive,Spark等服务的Kerberos工具类封装

idea连接服务器的hive等相关服务的kerberos认证注意事项

  • idea 本地配置,连接服务器;进行kerberos认证,连接hive、HDFS、Spark等服务注意事项:
  1. 本地idea连接Hadoop,需要在本地安装Hadoop的window工具hadoop-3.1.1-winutils-master ,配置环境变量
  2. 配置hosts主机名映射
  3. kerberos认证需要在idea工作目录所在的磁盘的根目录下创建对应的文件夹把keytab放到该目录下,方便认证。
  4. krb5.conf放到对应的目录,如:system.properties中配置了krb.conf=/etc/krb5.conf;在项目所在的磁盘根目录下,创建对应的etc目录在下面放配置文件krb5.conf。如:我的idea工作空间在D盘,那么就在D盘根目录下创建。
  5. 在resource目录下放置集群的配置文件:hdfs-site.xml、core-site.xml、mapred-site.xml、yarn-site.xml、hive-site.xml配置文件。
  6. 认证注意事项:如果最终是hive用户认证的,那么生成的文件默认为hive的家目录;如果是hdfs用户认证的,生成的文件默认为hdfs的家目录。

properties工具类

import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Utility for reading configuration values from the classpath resource
 * {@code /system.properties}.
 *
 * <p>The properties file is loaded once in a static initializer. If the
 * resource is missing or unreadable, the problem is logged and the property
 * map stays empty (lookups then fall back to their default values) —
 * the original version NPE'd during class initialization in that case.
 */
public class PropertiesUtil {

    private static final Logger log = Logger.getLogger(PropertiesUtil.class.getName());
    private static final Properties props = new Properties();
    private static final String propertyFileName = "/system.properties";

    static {
        // try-with-resources guarantees the stream is closed even when load() fails;
        // the original closed it only on the happy path.
        try (InputStream input = PropertiesUtil.class.getResourceAsStream(propertyFileName)) {
            if (input == null) {
                // Missing resource: warn instead of crashing class initialization.
                log.warning("Property file " + propertyFileName + " not found on classpath");
            } else {
                log.info("Start reading the " + propertyFileName + " file");
                props.load(input);
            }
        } catch (IOException ioe) {
            // Preserve the full stack trace, not just the message.
            log.log(Level.SEVERE, "Failed to load " + propertyFileName, ioe);
        }
    }

    /**
     * Returns the named property parsed as an integer.
     *
     * @param propertyName key to look up
     * @return parsed integer value
     * @throws RuntimeException      if the property is absent or blank
     * @throws NumberFormatException if the value is not a valid integer
     */
    public static Integer getRequiredIntegerProperty(String propertyName) {
        String str = getRequiredStringProperty(propertyName);
        return Integer.parseInt(str);
    }

    /**
     * Returns the named property, failing fast when it is absent or blank.
     *
     * @param propertyName key to look up
     * @return the non-blank property value
     * @throws RuntimeException if the property is missing, empty, or whitespace-only
     */
    public static String getRequiredStringProperty(String propertyName) {
        String str = getStringProperty(propertyName, null);
        if (str == null || str.trim().isEmpty()) {
            // Fixed garbled message (was: "not is property file").
            throw new RuntimeException(propertyName + " not found in property file " + propertyFileName);
        }
        return str;
    }

    /**
     * Returns the named property, or {@code defaultValue} when absent.
     *
     * @param propertyName key to look up
     * @param defaultValue value returned when the key is not present
     * @return property value or the supplied default
     */
    public static String getStringProperty(String propertyName, String defaultValue) {
        if (props.containsKey(propertyName)) {
            return (String) props.get(propertyName);
        }
        return defaultValue;
    }

    /**
     * Returns the named property re-decoded from ISO-8859-1 into the given
     * encoding, or {@code defaultValue} when absent.
     *
     * <p>NOTE: despite its name this method returns a {@code String}; the
     * signature is kept as-is for backward compatibility with existing callers.
     *
     * @param propertyName key to look up
     * @param defaultValue value returned when the key is not present
     * @param encoding     target charset name (e.g. "UTF-8")
     * @return re-encoded property value, the raw value if the encoding is
     *         unsupported, or the default when the key is absent
     */
    public static String getIntegerProperty(String propertyName, String defaultValue, String encoding) {
        if (!props.containsKey(propertyName)) {
            return defaultValue;
        }
        // Properties files are read as ISO-8859-1; convert to the requested charset.
        String value = (String) props.get(propertyName);
        try {
            value = new String(value.getBytes("ISO8859-1"), encoding);
        } catch (UnsupportedEncodingException e) {
            // Log instead of printStackTrace(); fall through with the raw value.
            log.log(Level.WARNING, "Unsupported encoding: " + encoding, e);
        }
        return value;
    }
}

线程池调度工具类

import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * {@link ThreadFactory} that produces daemon threads named
 * {@code "Scheduled Pool-<pool>-Thread-<n>"} at normal priority.
 *
 * <p>Threads are daemons so the JVM can exit as soon as the business threads
 * finish; a non-daemon renewal thread would keep the process alive on Linux
 * even after the main work completed.
 */
public class ScheduledThreadFactory implements ThreadFactory {

    /** Distinguishes factories created in the same JVM. */
    private static final AtomicInteger poolNumber = new AtomicInteger(1);

    private final ThreadGroup group;
    private final AtomicInteger threadNumber = new AtomicInteger(1);
    private final String namePrefix;

    public ScheduledThreadFactory() {
        SecurityManager sm = System.getSecurityManager();
        this.group = (sm == null)
                ? Thread.currentThread().getThreadGroup()
                : sm.getThreadGroup();
        this.namePrefix = "Scheduled Pool-" + poolNumber.getAndIncrement() + "-Thread-";
    }

    @Override
    public Thread newThread(Runnable task) {
        Thread worker = new Thread(group, task, namePrefix + threadNumber.getAndIncrement());
        // Daemon: do not block JVM shutdown once non-daemon threads are done.
        worker.setDaemon(true);
        // Normalize priority in case the creating thread's priority differs.
        if (worker.getPriority() != Thread.NORM_PRIORITY) {
            worker.setPriority(Thread.NORM_PRIORITY);
        }
        return worker;
    }
}

Kerberos认证工具类

importcom.xxxx.utils.PropertiesUtil;importorg.apache.commons.lang.StringUtils;importorg.apache.hadoop.conf.Configuration;importorg.apache.hadoop.security.UserGroupInformation;importjava.io.IOException;importjava.util.concurrent.Executors;importjava.util.concurrent.ScheduledExecutorService;importjava.util.concurrent.TimeUnit;publicclassKerberosAuthen{privatestaticScheduledExecutorService scheduledExecutor =Executors.newScheduledThreadPool(1,newScheduledThreadFactory());publicstaticvoidkerberosAuthen(){krbAuth();/*
        * 每5分钟执行一次向kerberos进行认证的方法
        * */
        scheduledExecutor.scheduleAtFixedRate(()->krbAuth(),5L,5L,TimeUnit.MINUTES);}/*
     向kerberos认证
  * */privatestaticvoidkrbAuth(){String krbConf =PropertiesUtil.getRequiredStringProperty("krb.conf");String krbKeytab =PropertiesUtil.getRequiredStringProperty("hive.krb.keytab");String krbPrincipal =PropertiesUtil.getRequiredStringProperty("hive.krb.principal");if(StringUtils.isEmpty(krbConf)||StringUtils.isEmpty(krbKeytab)||StringUtils.isEmpty(krbPrincipal)){thrownewRuntimeException("------------------------Kerberos认证文件不存在--------------------------");}//java 程序本身自带kerberos客户端,需要krbConf. 可以进行当前节点的kerberos认证System.setProperty("java.security.krb5.conf",krbConf);Configuration configuration =newConfiguration();
        configuration.set("hadoop.security.authorization","kerberos");//指定keytab文件和principal,为当前java程序配置认证
        configuration.set("keytab.file",krbKeytab);
        configuration.setBoolean("hadoop.security.authorization",true);
        configuration.set("kerberos.principal",krbPrincipal);try{UserGroupInformation.setConfiguration(configuration);UserGroupInformation.loginUserFromKeytab(krbPrincipal,krbKeytab);}catch(IOException ioe){System.err.println(ioe.getMessage());}}}

properties配置文件

  • system.properties示例(文件名须与PropertiesUtil中的propertyFileName="/system.properties"一致):
krb.conf=/etc/krb5.conf
hive.krb.keytab=/opt/keytabs/hive.keytab
[email protected]
标签: java spark hive

本文转载自: https://blog.csdn.net/m0_46168848/article/details/129333412
版权归原作者 岁月的眸 所有, 如有侵权,请联系我们删除。

“关于Java连接Hive,Spark等服务的Kerberos工具类封装”的评论:

还没有评论