HDFS Java API Operations
These examples use JUnit's @Before and @Test annotations to set up and run each test.
Creating a Directory
**fs.mkdirs()**
```java
// Step 1: obtain the Hadoop FileSystem object
private FileSystem fs = null;

// Initialize the environment
@Before
public void init() throws URISyntaxException, IOException {
    /*
     * new URI("hdfs://192.168.206.3:9000"): the HDFS NameNode to connect to
     * Configuration(): use Hadoop's default configuration
     * "root": the login user (passed explicitly in the later examples)
     */
    Configuration conf = new Configuration();
    fs = FileSystem.get(new URI("hdfs://192.168.206.3:9000"), conf);
}

// Create a directory
@Test
public void testMkdir() throws IOException {
    boolean flag = fs.mkdirs(new Path("/javaAPI/mk/dir1/dir2"));
    System.out.println(flag ? "Created successfully" : "Creation failed");
}
```
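All of the snippets in this post share the same test-class skeleton; a minimal sketch of the imports they assume, with the hadoop-client and JUnit 4 dependencies on the classpath:

```java
// Imports assumed by the examples in this post (hadoop-client and junit on the classpath)
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
```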
Listing a Directory Tree
```java
// Step 1: obtain the Hadoop FileSystem object
private FileSystem fs = null;
private List<String> hdfsPathsLists;

// Step 2: initialize the environment
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    fs = FileSystem.get(new URI("hdfs://192.168.206.3:9000"), new Configuration(), "root");
}

// List the directory tree
@Test
public void getDirList() throws IOException {
    // Initialize the list that holds the collected paths
    hdfsPathsLists = new ArrayList<String>();
    // Collect paths starting from the root directory
    getHdfspaths(new Path("/"));
    // Print the results
    for (String p : hdfsPathsLists) {
        System.out.println(p);
    }
}

// Recursively traverse a directory, collecting the directories and files under it
private void getHdfspaths(Path path) throws IOException {
    FileStatus[] dirs = fs.listStatus(path);
    for (FileStatus s : dirs) {
        hdfsPathsLists.add(s.getPath().toString());
        if (s.isDirectory()) {
            getHdfspaths(s.getPath());
        }
    }
}

@After
public void close() throws IOException {
    fs.close();
}
```
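If only the files (not the directories) are needed, FileSystem also provides a built-in recursive listing via `listFiles`. A minimal sketch, reusing the `fs` field initialized above and additionally importing `org.apache.hadoop.fs.RemoteIterator` and `org.apache.hadoop.fs.LocatedFileStatus`:

```java
// List every file under "/" recursively; listFiles returns files only, not directories
@Test
public void getFileListWithListFiles() throws IOException {
    RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path("/"), true);
    while (it.hasNext()) {
        LocatedFileStatus status = it.next();
        System.out.println(status.getPath() + " (" + status.getLen() + " bytes)");
    }
}
```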
Deleting a Directory
**fs.deleteOnExit()**
```java
// Step 1: obtain the Hadoop FileSystem object
private FileSystem fs = null;

// Step 2: initialize the environment
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    fs = FileSystem.get(new URI("hdfs://192.168.206.3:9000"), new Configuration(), "root");
}

// Delete a directory
@Test
public void testRMdir() throws IOException {
    boolean flag = fs.deleteOnExit(new Path("/javaAPI/mk/dir1/dir2"));
    System.out.println(flag ? "Deleted successfully" : "Deletion failed");
}
```
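Note that `deleteOnExit` only marks the path to be removed when the FileSystem is closed; the boolean it returns indicates whether the path was registered for deletion, not that it is already gone. For an immediate (and optionally recursive) delete, `FileSystem.delete` can be used instead; a minimal sketch:

```java
// Delete the directory right away; the second argument enables recursive deletion
@Test
public void testDeleteNow() throws IOException {
    boolean flag = fs.delete(new Path("/javaAPI/mk/dir1/dir2"), true);
    System.out.println(flag ? "Deleted successfully" : "Deletion failed");
}
```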
Checking Whether a File Exists
**fs.exists()**
```java
// Step 1: obtain the Hadoop FileSystem object
private FileSystem fs = null;

// Step 2: initialize the environment
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    fs = FileSystem.get(new URI("hdfs://192.168.206.3:9000"), new Configuration(), "root");
}

// Check whether a file exists
@Test
public void testExistsFile() throws IOException {
    String src = "hdfs://192.168.206.3:9000/a.txt";
    boolean b = fs.exists(new Path(src));
    if (b) {
        System.out.println(b + "-exists");
    } else {
        System.out.println(b + "-not exists");
    }
}
```
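When the file does exist, `getFileStatus` returns its metadata (length, modification time, permissions) in a single call; a minimal sketch, assuming `/a.txt` is present:

```java
// Fetch metadata for an existing file; getFileStatus throws FileNotFoundException if the path is missing
@Test
public void testFileStatus() throws IOException {
    FileStatus status = fs.getFileStatus(new Path("/a.txt"));
    System.out.println("length: " + status.getLen());
    System.out.println("modified: " + status.getModificationTime());
    System.out.println("permission: " + status.getPermission());
}
```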
Distinguishing Directories from Files
**fs.isDirectory**
**fs.isFile**
```java
// Step 1: obtain the Hadoop FileSystem object
private FileSystem fs = null;

// Step 2: initialize the environment
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    fs = FileSystem.get(new URI("hdfs://192.168.206.3:9000"), new Configuration(), "root");
}

// Distinguish a directory from a file
@Test
public void testfilesFile() throws IOException {
    String src = "hdfs://192.168.206.3:9000/a.txt";
    boolean b = fs.isDirectory(new Path(src));
    if (b) {
        System.out.println("It is a directory");
    } else if (fs.isFile(new Path(src))) {
        System.out.println("It is a file");
    } else {
        System.out.println("Not sure");
    }
}
```
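In recent Hadoop releases the `fs.isDirectory(Path)` / `fs.isFile(Path)` convenience methods are marked deprecated; the same check can go through `getFileStatus`, as in this minimal sketch:

```java
// Same check via FileStatus; getFileStatus throws FileNotFoundException for a missing path
@Test
public void testIsDirOrFileViaStatus() throws IOException {
    FileStatus status = fs.getFileStatus(new Path("/a.txt"));
    if (status.isDirectory()) {
        System.out.println("It is a directory");
    } else if (status.isFile()) {
        System.out.println("It is a file");
    }
}
```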
Renaming a File
**fs.rename**
```java
// Step 1: obtain the Hadoop FileSystem object
private FileSystem fs = null;

// Step 2: initialize the environment
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    fs = FileSystem.get(new URI("hdfs://192.168.206.3:9000"), new Configuration(), "root");
}

// Rename a file
@Test
public void testRenameFile() throws IOException {
    String src = "hdfs://192.168.206.3:9000/a.txt";
    String hdfsDst = "hdfs://192.168.206.3:9000/b.txt";
    boolean flag = fs.rename(new Path(src), new Path(hdfsDst));
    System.out.println(flag ? "Renamed successfully" : "Rename failed");
}
```
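`rename` also serves as an intra-HDFS move when the destination points into a different directory; a minimal sketch, assuming the target directory `/javaAPI` already exists:

```java
// Move /b.txt into the /javaAPI directory by "renaming" it to a new path
@Test
public void testMoveWithinHdfs() throws IOException {
    boolean flag = fs.rename(new Path("/b.txt"), new Path("/javaAPI/b.txt"));
    System.out.println(flag ? "Moved successfully" : "Move failed");
}
```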
Uploading a File
**fs.copyFromLocalFile**
```java
// Step 1: obtain the Hadoop FileSystem object
private FileSystem fs = null;

// Step 2: initialize the environment
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    fs = FileSystem.get(new URI("hdfs://192.168.206.3:9000"), new Configuration(), "root");
}

// Upload a file
@Test
public void testUploadFile() throws IOException {
    String src = "/simple/local.txt";
    String hdfsDst = "/javaAPI";
    fs.copyFromLocalFile(new Path(src), new Path(hdfsDst));
    System.out.println("upload success");
}
```
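`copyFromLocalFile` also has an overload that controls whether the local source is deleted and whether an existing destination file is overwritten; a minimal sketch:

```java
// delSrc = false: keep the local file; overwrite = true: replace the file if it already exists in HDFS
@Test
public void testUploadWithOverwrite() throws IOException {
    fs.copyFromLocalFile(false, true, new Path("/simple/local.txt"), new Path("/javaAPI"));
    System.out.println("upload success");
}
```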
Moving Files
**fs.moveToLocalFile**
**fs.moveFromLocalFile**
```java
// Step 1: obtain the Hadoop FileSystem object
private FileSystem fs = null;

// Step 2: initialize the environment
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    fs = FileSystem.get(new URI("hdfs://192.168.206.3:9000"), new Configuration(), "root");
}

/*
 * Move a file from HDFS to the local filesystem.
 */
@Test
public void testmovetoLocalFile() throws IOException {
    String src = "hdfs://192.168.206.3:9000/a.txt";
    String desc = "/root/";
    fs.moveToLocalFile(new Path(src), new Path(desc));
    System.out.println("Moved successfully");
}

/*
 * Move a local file to HDFS.
 * Because moveFromLocalFile is used, the local file is moved to HDFS,
 * i.e. it no longer exists locally afterwards.
 */
@Test
public void testmovetoHdfsFile() throws IOException {
    String src = "/root/b.txt";
    String desc = "hdfs://192.168.206.3:9000/javaAPI";
    fs.moveFromLocalFile(new Path(src), new Path(desc));
    System.out.println("Moved successfully");
}
```
Downloading a File
**fs.copyToLocalFile**
```java
// Step 1: obtain the Hadoop FileSystem object
private FileSystem fs = null;

// Step 2: initialize the environment
@Before
public void init() throws URISyntaxException, IOException, InterruptedException {
    fs = FileSystem.get(new URI("hdfs://192.168.206.3:9000"), new Configuration(), "root");
}

/*
 * Download a file to the local filesystem.
 */
@Test
public void testDownloadFile() throws IOException {
    String src = "/javaAPI/mk/dir1/a.txt";
    String hdfsDst = "/root/";
    fs.copyToLocalFile(new Path(src), new Path(hdfsDst));
    System.out.println("Downloaded successfully");
}
```
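On a client machine without the native Hadoop libraries (a common situation when running these tests from Windows), the four-argument overload of `copyToLocalFile` can write through the raw local filesystem and skip the local checksum file; a minimal sketch:

```java
// delSrc = false: keep the HDFS copy; useRawLocalFileSystem = true: write directly, without a .crc checksum file
@Test
public void testDownloadFileRawLocal() throws IOException {
    fs.copyToLocalFile(false, new Path("/javaAPI/mk/dir1/a.txt"), new Path("/root/"), true);
    System.out.println("Downloaded successfully");
}
```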
This article is reposted from: https://blog.csdn.net/weixin_45740510/article/details/122094891
Copyright belongs to the original author 与宇宙对视. If there is any infringement, please contact us for removal.