文章目录
HDFS的shell命令
1、安全模式
安全模式:集群启动时,所有的DN(DataNode)都必须向NN(NameNode)汇报磁盘使用状态和block存储信息。在此之前出于对hdfs的保护,会禁止访问hdfs,此状态为安全模式
1.查看安全模式状态
#查看安全模式状态
hdfs dfsadmin -safemode get
#-状态-
on|off
#-----
2.手工开启安全模式状态
#手工进入安全模式
hdfs dfsadmin -safemode enter
3.手工关闭安全模式状态
#手工退出安全模式
hdfs dfsadmin -safemode leave
2、文件操作指令
指令的一般格式如下:
hdfs dfs -CMD [-OPTION][path1] ★★★#使用较多
hadoop fs -CMD [-OPTION][path1]
1.查看文件目录
#查看指定路径的当前目录结构
hdfs dfs -ls <路径>#查看HDFS根目录结构的文件组成
hdfs dfs -ls /
#--------------------------------------------------------------
Found 1 items
drwxrwx--- - root supergroup 02021-12-29 08:37 /tmp
#--------------------------------------------------------------#递归查看指定路径的目录结构
hdfs dfs -ls -R <路径>#递归根目录的目录结构
hdfs dfs -lsr /
hdfs dfs -ls -R /
#----------------------------------------------------------------------------------------
drwxr-xr-x - root supergroup 02021-12-29 10:07 /test
drwxr-xr-x - root supergroup 02021-12-29 10:07 /test/kb16
drwxr-xr-x - root supergroup 02021-12-29 12:25 /test/kb16/hadoop
-rw-r--r-- 1 root supergroup 6782609872021-12-29 12:25 /test/kb16/hadoop/ratings.csv
#----------------------------------------------------------------------------------------
2.查看文件夹情况
#统计目录下个文件大小
hdfs dfs -du <路径>#统计/test/kb16/hadoop/下文件大小
hdfs dfs -du /test/hadoop/
#--------------------------------------------------678260987678260987 /test/hadoop/test1.csv
#--------------------------------------------------#汇总统计目录下文件(夹)大小
hdfs dfs -du -s <路径>#汇总统计目录下文件(夹)大小
hdfs dfs -du -s /
#----------------------678260987678260987 /
#----------------------#统计文件(夹)数量
hdfs dfs -count [-q]<路径>#统计文件(夹)数量#-------------------101678260987 /
#-------------------
3.文件操作
#移动文件
hdfs dfs -mv <源路径><目的路径>#将ratings.csv移动到/tmp/hadoop-yarn中
hdfs dfs -mv /test/hadoop/ratings.csv /tmp/hadoop-yarn
#--------------------------------------------------------------------------------------
drwxr-xr-x - root supergroup 02021-12-29 10:07 /test
drwxr-xr-x - root supergroup 02021-12-29 10:07 /test/kb16
drwxr-xr-x - root supergroup 02021-12-29 17:41 /test/kb16/hadoop
drwxrwx--- - root supergroup 02021-12-29 08:37 /tmp
drwxrwx--- - root supergroup 02021-12-29 17:41 /tmp/hadoop-yarn
-rw-r--r-- 1 root supergroup 6782609872021-12-29 12:25 /tmp/hadoop-yarn/ratings.csv
#---------------------------------------------------------------------------------------#将指定路径文件复制到目标路径
hdfs dfs -cp <源路径><目的路径>#将/test/kb16/hadoop下的ratings.csv复制到test/kb16中
hdfs dfs -cp /test/hadoop/ratings.csv /test/kb16
#------------------------------------------------------------------------------------2021-12-29 17:47:27,806 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:27,980 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:28,537 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:29,513 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:30,365 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:31,239 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:32,106 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false /test/kb16
2021-12-29 17:47:27,806 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:27,980 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:28,537 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:29,513 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:30,365 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:31,239 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:32,106 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false#------------------------------------------------------------------------------------#删除文件/空白文件夹
hdfs dfs -rm [-skipTrash]<路径>#删除/test/kb16下的ratings.csv
hdfs dfs -rm /test/ratings.csv
#-----------------------------
Deleted /test/ratings.csv
#-----------------------------#递归删除文件/空白文件夹
hdfs dfs -rmr [-skipTrash]<路径>
4.上传文件
#上传文件到hadoop中
hdfs dfs -put <多个linux上的文件><hdfs路径>#上传文件到/test/kb16中
hdfs dfs -put ratings.csv /test/
#-------------------------------------------------------------------------------------2021-12-29 18:07:24,565 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 18:07:25,012 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 18:07:25,257 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 18:07:25,673 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 18:07:26,478 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 18:07:27,162 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false#-------------------------------------------------------------------------------------#将本地文件移动到hdfs路径上
hdfs dfs -copyFromLocal <多个linux上的文件><hdfs路径>#将本地文件移动到hdfs路径上
hdfs dfs -moveFromLocal <多个linux上的文件><hdfs路径>
5、获取文件
#将hdfs中路径文件下载到本地
hdfs dfs -get <hdfs路径><linux路径>#将hadoop中/test/kb16/hadoop下的ratings.csv下载到根目录中
hdfs dfs -get /test/hadoop/ratings.csv ~/
#-------------------------------------------------------------------------------------2021-12-29 18:15:14,441 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false#-------------------------------------------------------------------------------------#将hdfs中路径文件合并到本地
hdfs dfs -getmerge <源路径><linux路径>#
hdfs dfs -getmerge /test/hadoop/ratings.csv ratings.csv
#-------------------------------------------------------------------------------------2021-12-29 18:18:08,398 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false#-------------------------------------------------------------------------------------#将文件从HDFS中复制到本地
hdfs dfs -copyToLocal [-ignorecrc][-crc][hdfs源路径][linux目的路径]#将文件从HDFS中移动到本地
hdfs dfs -moveToLocal [-crc]<hdfs源路径><linux目的路径> 从HDFS移动到本地
6.查看文件内容
#利用管道符进行文件的查看
hdfs dfs -cat path|查询范围
#利用text命令查看文件内容
hdfs dfs -text <hdfs路径> 查看文件内容
#查看文件尾部信息
hdfs dfs -tail [-f]<文件>#查看文件头部信息
hdfs dfs -head <文件>
7.创建目录
#创建多级文件目录
hdfs dfs -mkdir -p path1/path2
8.修改副本数量
修改hadoop备份的副本数量,默认是三份
#修改hadoop备份的副本数量
hdfs dfs -setrep [-R][-w]<副本数><路径>
9.创建空白文件(不推荐使用)
利用-touchz命令创建空白文件的过程中可能会产生错误,因此不推荐使用这个命令
#在指定路径上创建创建空白文件
hdfs dfs -touchz <文件路径>
10.显示文件统计信息
#显示文件统计信息
hdfs dfs -stat [format]<路径>#------------------2021-12-29 04:25:27
#------------------
11、修改权限
#修改文件权限
hdfs dfs -chmod [-R]<权限模式>[路径]#修改文件的所有者
hdfs dfs -chown [-R][属主][:[属组]] 路径
#修改文件所属的用户组
hdfs dfs -chgrp [-R] 属组名称 路径
12、查看指令的帮助信息
#查看指令的帮助信息
hdfs dfs -help [命令选项]
1、安全模式
安全模式:集群启动时,所有的DN(DataNode)都必须向NN(NameNode)汇报磁盘使用状态和block存储信息。在此之前出于对hdfs的保护,会禁止访问hdfs,此状态为安全模式
1.查看安全模式状态
#查看安全模式状态
hdfs dfsadmin -safemode get
#-状态-
on|off
#-----
2.手工开启安全模式状态
#手工进入安全模式
hdfs dfsadmin -safemode enter
3.手工关闭安全模式状态
#手工退出安全模式
hdfs dfsadmin -safemode leave
2、文件操作指令
指令的一般格式如下:
hdfs dfs -CMD [-OPTION][path1] ★★★#使用较多
hadoop fs -CMD [-OPTION][path1]
1.查看文件目录
#查看指定路径的当前目录结构
hdfs dfs -ls <路径>#查看HDFS根目录结构的文件组成
hdfs dfs -ls /
#--------------------------------------------------------------
Found 1 items
drwxrwx--- - root supergroup 02021-12-29 08:37 /tmp
#--------------------------------------------------------------#递归查看指定路径的目录结构
hdfs dfs -ls -R <路径>#递归根目录的目录结构
hdfs dfs -lsr /
hdfs dfs -ls -R /
#----------------------------------------------------------------------------------------
drwxr-xr-x - root supergroup 02021-12-29 10:07 /test
drwxr-xr-x - root supergroup 02021-12-29 10:07 /test/kb16
drwxr-xr-x - root supergroup 02021-12-29 12:25 /test/kb16/hadoop
-rw-r--r-- 1 root supergroup 6782609872021-12-29 12:25 /test/kb16/hadoop/ratings.csv
#----------------------------------------------------------------------------------------
2.查看文件夹情况
#统计目录下个文件大小
hdfs dfs -du <路径>#统计/test/kb16/hadoop/下文件大小
hdfs dfs -du /test/kb16/hadoop/
#--------------------------------------------------678260987678260987 /test/kb16/hadoop/ratings.csv
#--------------------------------------------------#汇总统计目录下文件(夹)大小
hdfs dfs -du -s <路径>#汇总统计目录下文件(夹)大小
hdfs dfs -du -s /
#----------------------678260987678260987 /
#----------------------#统计文件(夹)数量
hdfs dfs -count [-q]<路径>#统计文件(夹)数量#-------------------101678260987 /
#-------------------
3.文件操作
#移动文件
hdfs dfs -mv <源路径><目的路径>#将ratings.csv移动到/tmp/hadoop-yarn中
hdfs dfs -mv /test/kb16/hadoop/ratings.csv /tmp/hadoop-yarn
#--------------------------------------------------------------------------------------
drwxr-xr-x - root supergroup 02021-12-29 10:07 /test
drwxr-xr-x - root supergroup 02021-12-29 10:07 /test/kb16
drwxr-xr-x - root supergroup 02021-12-29 17:41 /test/kb16/hadoop
drwxrwx--- - root supergroup 02021-12-29 08:37 /tmp
drwxrwx--- - root supergroup 02021-12-29 17:41 /tmp/hadoop-yarn
-rw-r--r-- 1 root supergroup 6782609872021-12-29 12:25 /tmp/hadoop-yarn/ratings.csv
#---------------------------------------------------------------------------------------#将指定路径文件复制到目标路径
hdfs dfs -cp <源路径><目的路径>#将/test/kb16/hadoop下的ratings.csv复制到test/kb16中
hdfs dfs -cp /test/kb16/hadoop/ratings.csv /test/kb16
#------------------------------------------------------------------------------------2021-12-29 17:47:27,806 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:27,980 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:28,537 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:29,513 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:30,365 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:31,239 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:32,106 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false /test/kb16
2021-12-29 17:47:27,806 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:27,980 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:28,537 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:29,513 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:30,365 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:31,239 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 17:47:32,106 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false#------------------------------------------------------------------------------------#删除文件/空白文件夹
hdfs dfs -rm [-skipTrash]<路径>#删除/test/kb16下的ratings.csv
hdfs dfs -rm /test/kb16/ratings.csv
#-----------------------------
Deleted /test/kb16/ratings.csv
#-----------------------------#递归删除文件/空白文件夹
hdfs dfs -rmr [-skipTrash]<路径>
4.上传文件
#上传文件到hadoop中
hdfs dfs -put <多个linux上的文件><hdfs路径>#上传文件到/test/kb16中
hdfs dfs -put ratings.csv /test/kb16
#-------------------------------------------------------------------------------------2021-12-29 18:07:24,565 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 18:07:25,012 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 18:07:25,257 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 18:07:25,673 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 18:07:26,478 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false2021-12-29 18:07:27,162 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false#-------------------------------------------------------------------------------------#将本地文件移动到hdfs路径上
hdfs dfs -copyFromLocal <多个linux上的文件><hdfs路径>#将本地文件移动到hdfs路径上
hdfs dfs -moveFromLocal <多个linux上的文件><hdfs路径>
5、获取文件
#将hdfs中路径文件下载到本地
hdfs dfs -get <hdfs路径><linux路径>#将hadoop中/test/kb16/hadoop下的ratings.csv下载到根目录中
hdfs dfs -get /test/kb16/hadoop/ratings.csv ~/
#-------------------------------------------------------------------------------------2021-12-29 18:15:14,441 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false#-------------------------------------------------------------------------------------#将hdfs中路径文件合并到本地
hdfs dfs -getmerge <源路径><linux路径>#
hdfs dfs -getmerge /test/kb16/hadoop/ratings.csv ratings.csv
#-------------------------------------------------------------------------------------2021-12-29 18:18:08,398 INFO sasl.SaslDataTransferClient: SASL encryption trust check: localHostTrusted = false, remoteHostTrusted =false#-------------------------------------------------------------------------------------#将文件从HDFS中复制到本地
hdfs dfs -copyToLocal [-ignorecrc][-crc][hdfs源路径][linux目的路径]#将文件从HDFS中移动到本地
hdfs dfs -moveToLocal [-crc]<hdfs源路径><linux目的路径> 从HDFS移动到本地
6.查看文件内容
#利用管道符进行文件的查看
hdfs dfs -cat path|查询范围
#利用text命令查看文件内容
hdfs dfs -text <hdfs路径> 查看文件内容
#查看文件尾部信息
hdfs dfs -tail [-f]<文件>#查看文件头部信息
hdfs dfs -head <文件>
7.创建目录
#创建多级文件目录
hdfs dfs -mkdir -p path1/path2
8.修改副本数量
修改hadoop备份的副本数量,默认是三份
#修改hadoop备份的副本数量
hdfs dfs -setrep [-R][-w]<副本数><路径>
9.创建空白文件(不推荐使用)
利用-touchz命令创建空白文件的过程中可能会产生错误,因此不推荐使用这个命令
#在指定路径上创建创建空白文件
hdfs dfs -touchz <文件路径>
10.显示文件统计信息
#显示文件统计信息
hdfs dfs -stat [format]<路径>#------------------2021-12-29 04:25:27
#------------------
11、修改权限
#修改文件权限
hdfs dfs -chmod [-R]<权限模式>[路径]#修改文件的所有者
hdfs dfs -chown [-R][属主][:[属组]] 路径
#修改文件所属的用户组
hdfs dfs -chgrp [-R] 属组名称 路径
12、查看指令的帮助信息
#查看指令的帮助信息
hdfs dfs -help [命令选项]
版权归原作者 绝域时空 所有, 如有侵权,请联系我们删除。