[Easy-to-Understand Edition] Using IDEA to Operate Hadoop (CRUD)



Prerequisite: Hadoop is already set up and running on the server.

I have personally tested the code below; it runs end to end and all the basic operations work. I hope it helps!

I. Add the hdfs dependency

  1. Create a maven project cn.et
  2. Configure the Aliyun mirror in your local maven settings so dependencies download quickly (restart to reload; a settings.xml sketch follows the dependency below)
  3. Add the hadoop dependency to the pom file:
<!-- hdfs dependency -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>3.1.3</version>
</dependency>
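
Step 2 above mentions the Aliyun mirror without showing it. Below is a minimal sketch of the <mirror> entry for your local maven settings.xml, assuming the publicly documented Aliyun repository URL; adjust the id and mirrorOf values to your own setup:

<!-- settings.xml: route central downloads through the Aliyun mirror for faster resolution -->
<mirrors>
    <mirror>
        <id>aliyunmaven</id>
        <mirrorOf>central</mirrorOf>
        <name>Aliyun public repository</name>
        <url>https://maven.aliyun.com/repository/public</url>
    </mirror>
</mirrors>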

II. Create the hdfs utility class

Create an HdfsApiUtils class that implements create, delete, update, and query operations on hdfs:

  1. The code that obtains the hdfs handle should be wrapped in a static block (it runs first, and only once)
  2. Create a file or directory (mkdirs; an absolute path such as hdfs://<server-address>:9000/<new-directory> is recommended)
  3. Delete a file or directory (delete)
  4. Rename or move a file or directory (rename)
  5. List all files or directories under the current path (showing the time and the size in MB; a formatting sketch follows the class below)
package com.example.springbootonline.utils;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.net.URI;

/**
 * Created with IntelliJ IDEA.
 *
 * @Author: Print
 * @Date: 2023/07/17/10:07
 * @Description: IDEA-integrated hdfs client; address hdfs://<server-address>:9000
 */
@Component
public class HdfsApiUtils {

    private static String hdfsUrl = "hdfs://<server-address>:9000"; // replace with your NameNode address
    private static String hdfsUsername = "root";
    private static FileSystem hdfs;

    static {
        Configuration conf = new Configuration();
        // The following setting is required when connecting to Hadoop on a cloud server
        conf.set("dfs.client.use.datanode.hostname", "true");
        try {
            hdfs = FileSystem.get(URI.create(hdfsUrl), conf, hdfsUsername);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Create a file or directory
     */
    public boolean mkdir(String path) {
        boolean res = false;
        try {
            hdfs.mkdirs(new Path(path));
            res = true;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return res;
    }

    /**
     * Delete a file or directory
     */
    public boolean delete(String path) {
        boolean res = false;
        try {
            res = hdfs.delete(new Path(path), true);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return res;
    }

    /**
     * Rename or move a file or directory
     */
    public boolean rename(String oldFile, String newFile) {
        boolean res = false;
        try {
            res = hdfs.rename(new Path(oldFile), new Path(newFile));
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return res;
    }

    /**
     * List all files or directories under the given path (current level only)
     */
    public FileStatus[] findCurrent(String path) {
        FileStatus[] res = null;
        try {
            res = hdfs.listStatus(new Path(path));
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return res;
    }

    /**
     * List all files under the given path (recursive)
     */
    public RemoteIterator<LocatedFileStatus> findAll(String path) {
        RemoteIterator<LocatedFileStatus> iterator = null;
        try {
            iterator = hdfs.listFiles(new Path(path), true);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return iterator;
    }

    /**
     * Upload a local file to hdfs
     */
    public boolean upload(String localPath, String path) {
        boolean res = false;
        try {
            hdfs.copyFromLocalFile(new Path(localPath), new Path(path));
            res = true;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return res;
    }

    /**
     * Download a file from hdfs to the local file system
     */
    public boolean download(String hdfsPath, String localPath) {
        boolean res = false;
        try {
            hdfs.copyToLocalFile(new Path(hdfsPath), new Path(localPath));
            res = true;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return res;
    }
}

III. Test the hdfs utility class

import com.example.springbootonline.utils.HdfsApiUtils;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.jupiter.api.Test;
import org.springframework.stereotype.Component;

import java.io.IOException;

/**
 * Created with IntelliJ IDEA.
 *
 * @Author: Print
 * @Date: 2023/07/17/10:59
 * @Description:
 */
@Component
public class HdfsApiUtilsTest {

    HdfsApiUtils hdfsApiUtils = new HdfsApiUtils();

    @Test
    public void mkdir() {
        String newFile = "/file";
        System.out.println(hdfsApiUtils.mkdir(newFile));
    }

    @Test
    public void delete() {
        String path = "/aaa";
        System.out.println(hdfsApiUtils.delete(path));
    }

    @Test
    public void rename() {
        String oldFile = "/aaa", newFile = "/newAAA";
        System.out.println(hdfsApiUtils.rename(oldFile, newFile));
    }

    @Test
    public void upload() {
        String localPath = "F:\\Users\\HP\\Videos\\Captures\\demo.mp4", path = "/abc/aaa";
        System.out.println(hdfsApiUtils.upload(localPath, path));
    }

    @Test
    public void findCurrent() {
        String path = "/file";
        FileStatus[] fss = hdfsApiUtils.findCurrent(path);
        for (FileStatus fs : fss) {
            System.out.println(fs.toString() + "\n");
        }
    }

    @Test
    public void findAll() throws IOException {
        String path = "/file";
        RemoteIterator<LocatedFileStatus> iterator = hdfsApiUtils.findAll(path);
        while (iterator.hasNext()) {
            System.out.println(iterator.next().toString());
        }
    }
}
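
The test class covers every method except download. A minimal extra test in the same style might look like the following, where both paths are hypothetical placeholders rather than values from the original post; note that on Windows the local side of copyToLocalFile may additionally require the winutils binary:

    @Test
    public void download() {
        // hypothetical paths: point hdfsPath at a file that exists in your HDFS
        // and localPath at a writable local directory
        String hdfsPath = "/abc/aaa", localPath = "F:\\tmp";
        System.out.println(hdfsApiUtils.download(hdfsPath, localPath));
    }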

Reflections

I should probably also write up how to configure Hadoop on the server; I'll see whether I have time for that later.


This article is reposted from: https://blog.csdn.net/qq_48592827/article/details/131770993
Copyright belongs to the original author, PRINT!. If there is any infringement, please contact us and it will be removed.
