1. Create the Maven project
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.ztesoft</groupId>
    <artifactId>Hadoop</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <packaging>jar</packaging>
    <name>Hadoop</name>
    <url>http://maven.apache.org</url>
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>
    <dependencies>
        <dependency>
            <groupId>jdk.tools</groupId>
            <artifactId>jdk.tools</artifactId>
            <version>1.8</version>
            <scope>system</scope>
            <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.11</version>
            <scope>test</scope>
        </dependency>
        <!-- Hadoop common libraries -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.8.3</version>
        </dependency>
        <!-- Hadoop distributed file system (HDFS) client libraries -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>2.8.3</version>
        </dependency>
    </dependencies>
</project>
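Note that the jdk.tools dependency has system scope and resolves to ${JAVA_HOME}/lib/tools.jar, so JAVA_HOME must point at a JDK 8 (or earlier) installation; tools.jar was removed in JDK 9. A quick mvn clean compile confirms that the Hadoop dependencies resolve before moving on.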
2. Write the test class
package com.ztesoft.Hadoop;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.Before;
import org.junit.Test;

public class App {

    private FileSystem fileSystem = null;

    @Before
    public void init() throws Exception {
        Configuration conf = new Configuration();
        // Kerberos authentication: set the NameNode principal, then log in from a keytab
        conf.set("dfs.namenode.kerberos.principal", "hdfs/_HOST@NBDP.COM");
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
                "bdp/admin@NBDP.COM", "/home/keydir/bdp/bdp.keytab");
        UserGroupInformation.setLoginUser(ugi);
        fileSystem = FileSystem.get(new URI("hdfs://192.168.206.150:9000"), conf, "root");
    }
    /**
     * Upload a local file to HDFS
     * @throws IOException
     * @throws IllegalArgumentException
     */
    @Test
    public void uploadFile() throws Exception {
        fileSystem.copyFromLocalFile(new Path("G:/a.txt"), new Path("/"));
        fileSystem.close();
    }
    /**
     * Write data to an HDFS file through an output stream
     * (a matching read sketch appears after the class).
     * @throws IllegalArgumentException
     * @throws IOException
     */
    @Test
    public void testWriteDataToHdfsFile() throws IOException {
        // create(path, false): fail if /d.txt already exists instead of overwriting it
        FSDataOutputStream hdfsOut = fileSystem.create(new Path("/d.txt"), false);
        hdfsOut.write("123456789".getBytes());
        hdfsOut.write("123456789".getBytes());
        hdfsOut.flush();
        hdfsOut.close();
        fileSystem.close();
    }
    /**
     * Read a file from HDFS to the local disk.
     * When the HDFS client copies data down to local disk, it can use either Hadoop's
     * own local file system implementation or Java's native file I/O. The last
     * parameter, useRawLocalFileSystem, controls this: if true, the raw local file
     * system (plain Java I/O) is used; if false, Hadoop's own local file system is
     * used, which requires the Hadoop native library directory to be configured in
     * the local environment variables.
     * Here: delSrc = false (keep the source file on HDFS), useRawLocalFileSystem = true.
     * @throws Exception
     */
    @Test
    public void testGetFile() throws Exception {
        fileSystem.copyToLocalFile(false, new Path("/a.txt"), new Path("G:/"), true);
        fileSystem.close();
    }
    /**
     * List all files and directories under a given path with fileSystem.listStatus
     *
     * @throws FileNotFoundException
     * @throws IllegalArgumentException
     * @throws IOException
     */
    @Test
    public void testListDir() throws Exception {
        FileStatus[] listStatus = fileSystem.listStatus(new Path("/"));
        for (FileStatus file : listStatus) {
            System.out.println(file.getPath());
            System.out.println(file.isDirectory() ? "d" : "f"); // print "d" for a directory, "f" for a file
        }
        fileSystem.close();
    }
    /**
     * Delete a file
     * @throws Exception
     */
    @Test
    public void delete() throws Exception {
        String filePath = "/a.txt";
        Path path = new Path(filePath);
        // delete(path, false): delete immediately, non-recursively
        // (deleteOnExit would only remove the file when the FileSystem is closed)
        boolean isok = fileSystem.delete(path, false);
        if (isok) {
            System.out.println("delete file " + filePath + " success!");
        } else {
            System.out.println("delete file " + filePath + " failure");
        }
        fileSystem.close();
    }
}
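For completeness, here is the read counterpart to testWriteDataToHdfsFile: a minimal sketch (not part of the original class) that streams /d.txt back with an FSDataInputStream. It can be added as another method of App, assumes the same init() has run, needs one extra import (org.apache.hadoop.fs.FSDataInputStream), and expects testWriteDataToHdfsFile to have been run first so that /d.txt exists.

/**
 * Read an HDFS file back through an input stream.
 */
@Test
public void testReadDataFromHdfsFile() throws Exception {
    FSDataInputStream hdfsIn = fileSystem.open(new Path("/d.txt"));
    byte[] buffer = new byte[1024];
    int bytesRead;
    while ((bytesRead = hdfsIn.read(buffer)) > 0) {
        System.out.write(buffer, 0, bytesRead); // echo the file contents to stdout
    }
    hdfsIn.close();
    fileSystem.close();
}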
3. Verify
Run the tests: the paths printed by testListDir are fully qualified, so the console output shows the address of the NameNode (hdfs://192.168.206.150:9000) in front of each entry under /.
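To run a single test from the command line, recent versions of the Maven Surefire plugin support method selection, e.g. mvn test -Dtest=App#testListDir, which runs just the listing test and prints its output to the console.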