1. File operations
(1) create: write a file
(2) open: read a file
(3) delete: delete a file
2. Directory operations
(1) mkdirs: create a directory
(2) delete: delete a file or directory
(3) listStatus: list the contents of a directory
(4) getFileBlockLocations: show where a file's blocks are stored
1. Create a Maven project
Add the hadoop-client dependency to pom.xml:
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>2.7.6</version>
</dependency>
Add the junit dependency to pom.xml. Pinning a concrete JUnit 4 version is safer than the deprecated RELEASE meta-version, and test scope keeps JUnit out of the packaged artifact:
<dependency>
    <groupId>junit</groupId>
    <artifactId>junit</artifactId>
    <version>4.12</version>
    <scope>test</scope>
</dependency>
2. Write the test class TestHDFS
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.*;

public class TestHDFS {
    // FileSystem handle shared by all test methods
    FileSystem fs = null;

    @Before
    public void init() throws IOException {
        Configuration conf = new Configuration();
        // Point the client at the HDFS NameNode
        conf.set("fs.defaultFS", "hdfs://master:9000");
        // Set the replication factor (1 for a single-node cluster)
        conf.set("dfs.replication", "1");
        fs = FileSystem.get(conf);
    }
    @Test
    // Write (create) an empty file
    public void createFile() throws IOException {
        FSDataOutputStream fsDataOutputStream = fs.create(new Path("/test02.txt"));
        // Close the stream so the file is finalized on HDFS
        fsDataOutputStream.close();
    }
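
    @Test
    // Sketch of the explicit create overload; the argument values below
    // (overwrite flag, buffer size, replication, block size) are examples,
    // not settings taken from this tutorial's cluster
    public void createFileWithOptions() throws IOException {
        FSDataOutputStream out = fs.create(new Path("/test03.txt"),
                true,                // overwrite if the file already exists
                4096,                // client buffer size in bytes
                (short) 1,           // replication factor
                128 * 1024 * 1024L); // block size: 128 MB
        out.close();
    }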
    @Test
    // Append to a file, creating it first if it does not exist
    public void writeFile() throws IOException {
        Path path = new Path("/test01.txt");
        FSDataOutputStream fsDataOutputStream = null;
        if (!fs.exists(path)) {
            fsDataOutputStream = fs.create(path);
        } else {
            fsDataOutputStream = fs.append(path);
        }
        BufferedWriter bufferedWriter = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream));
        bufferedWriter.write("hadoop hdfs mapreduce yarn");
        bufferedWriter.newLine();
        bufferedWriter.write("hive hbase spark kafka scala flink");
        bufferedWriter.newLine();
        bufferedWriter.write("flume sqoop dataX flinkX azkaban zookeeper");
        bufferedWriter.newLine();
        bufferedWriter.write("phoenix ElasticSearch LogStash Kibana Kylin");
        // Closing the writer flushes it and closes the underlying HDFS stream
        bufferedWriter.close();
    }
    @Test
    // Delete a file or directory
    public void deleteFile() throws IOException {
        // boolean delete = fs.delete(new Path("/b.txt"));
        // System.out.println(delete);
        // The second argument (recursive) must be true
        // when deleting a non-empty directory
        fs.delete(new Path("/test"), true);
    }
    @Test
    // Read a file line by line
    public void openFile() throws IOException {
        FSDataInputStream open = fs.open(new Path("/ZentoPython.txt"));
        BufferedReader reader = new BufferedReader(new InputStreamReader(open));
        String line = null;
        while ((line = reader.readLine()) != null) {
            System.out.println(line);
        }
        reader.close();
        open.close();
    }
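
    @Test
    // Alternative reader, a sketch using Hadoop's IOUtils to stream the
    // file straight to stdout instead of decoding it line by line
    public void openFileWithCopyBytes() throws IOException {
        FSDataInputStream in = fs.open(new Path("/ZentoPython.txt"));
        // 4096 is the copy buffer size; false leaves System.out open
        IOUtils.copyBytes(in, System.out, 4096, false);
        in.close();
    }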
    @Test
    // Create a directory (missing parent directories are created as well)
    public void mkdirs() throws IOException {
        boolean mkdirs = fs.mkdirs(new Path("/dir1/dir2"));
        System.out.println(mkdirs);
    }
    @Test
    // List the contents of a directory (one level of nesting)
    public void listStatus() throws IOException {
        FileStatus[] fileStatuses = fs.listStatus(new Path("/"));
        for (FileStatus fileStatus : fileStatuses) {
            // System.out.println(fileStatus.getPath());
            if (fileStatus.isDirectory()) {
                for (FileStatus status : fs.listStatus(fileStatus.getPath())) {
                    System.out.println(status.getPath());
                }
            } else if (fileStatus.isFile()) {
                System.out.println(fileStatus.getPath());
            }
        }
    }
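
    @Test
    // Sketch of a fully recursive walk as an alternative to the manual
    // two-level loop above: listFiles(path, true) returns a RemoteIterator
    // over every file under the path (directories are not reported)
    public void listFilesRecursively() throws IOException {
        RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path("/"), true);
        while (it.hasNext()) {
            System.out.println(it.next().getPath());
        }
    }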
    @Test
    // Show where a file's blocks are stored; the range arguments (0, 2)
    // restrict the query to blocks covering the first 2 bytes of the file
    public void getFileBlockLocations() throws IOException {
        BlockLocation[] fileBlockLocations = fs.getFileBlockLocations(new Path("/a.txt"), 0, 2);
        for (BlockLocation fileBlockLocation : fileBlockLocations) {
            // Hostnames of the DataNodes holding the block
            for (String s : fileBlockLocation.getHosts()) {
                System.out.println(s);
            }
            // host:port names of the same DataNodes
            for (String s : fileBlockLocation.getNames()) {
                System.out.println(s);
            }
        }
    }
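
    @Test
    // Variant covering the whole file rather than the first 2 bytes,
    // using getFileStatus() to obtain the file length
    public void getAllBlockLocations() throws IOException {
        FileStatus status = fs.getFileStatus(new Path("/a.txt"));
        BlockLocation[] locations = fs.getFileBlockLocations(status, 0, status.getLen());
        for (BlockLocation location : locations) {
            // BlockLocation.toString() prints offset, length, and hosts
            System.out.println(location);
        }
    }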
    @After
    // Release the FileSystem handle after each test
    public void close() throws IOException {
        fs.close();
    }
}
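
If a test fails with an AccessControlException, the client is authenticating as the local OS user rather than a user known to HDFS. A minimal sketch of one workaround, assuming the target paths on this cluster are owned by a user named root (substitute your own), is to pass the user name explicitly when obtaining the FileSystem:

// Requires: import java.net.URI;
@Before
public void init() throws IOException, InterruptedException {
    Configuration conf = new Configuration();
    conf.set("dfs.replication", "1");
    // get(URI, conf, user) runs all subsequent calls as the given HDFS user;
    // "root" is an assumption about this cluster, not a requirement
    fs = FileSystem.get(URI.create("hdfs://master:9000"), conf, "root");
}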