package com.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IOUtils;
import java.io.*;
import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
 * HDFS API operations
 */
public class HDFSUtils {
    // "HH" gives the 24-hour clock; "hh" would print 1-12 without am/pm
    private static SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    private static Configuration conf = new Configuration();
    private static FileSystem fileSystem;
    static {
        try {
            // NameNode RPC address; change to match your cluster
            String uri = "hdfs://192.168.3.200:9000";
            fileSystem = FileSystem.get(URI.create(uri), conf);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
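    /*
     * Note: the NameNode address can also come from fs.defaultFS in a
     * core-site.xml on the classpath instead of being hard-coded; in that
     * case FileSystem.get(conf) alone is enough. Sketch (assumes the same
     * address as above):
     *
     *   conf.set("fs.defaultFS", "hdfs://192.168.3.200:9000");
     *   fileSystem = FileSystem.get(conf);
     */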
    /**
     * List the entries of a given HDFS directory and print an ls-style line for each.
     *
     * @param hdfsPath directory to list (e.g. "/" for the root)
     * @throws IOException
     */
    public static FileStatus[] list(String hdfsPath) throws IOException {
        Path path = new Path(hdfsPath);
        FileStatus[] fs = fileSystem.listStatus(path);
        for (FileStatus f : fs) {
            FsPermission fp = f.getPermission();
            FsAction ua = fp.getUserAction();
            FsAction ga = fp.getGroupAction();
            FsAction oa = fp.getOtherAction();
            // Directories are flagged 'd' and have no replication factor
            String type = f.isFile() ? "-" : "d";
            String replication = f.isFile() ? String.valueOf(f.getReplication()) : "-";
            // Build an rwx-style permission string, e.g. "-rw-r--r--"
            String perm = type + ua.SYMBOL + ga.SYMBOL + oa.SYMBOL;
            String owner = f.getOwner();
            String group = f.getGroup();
            long len = f.getLen();
            String mDate = df.format(new Date(f.getModificationTime()));
            String filePath = f.getPath().toString();
            System.out.println(perm + " " + replication + " " + owner + " " + group + "\t" + len + "\t" + mDate + " " + filePath);
        }
        return fs;
    }
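    /*
     * Illustrative output of list("/input") (hypothetical entry, format only):
     *
     *   -rw-r--r-- 3 hadoop supergroup	12	2024-05-01 10:20:30 hdfs://192.168.3.200:9000/input/hello.txt
     */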
    /**
     * Create a directory in HDFS.
     *
     * @param dir directory path (e.g. /input/data/)
     * @return true if the directory was created or already exists
     * @throws IOException
     */
    public static boolean mkdir(String dir) throws IOException {
        // mkdirs also creates any missing parent directories
        return fileSystem.mkdirs(new Path(dir));
    }
    /**
     * Create a file in the given HDFS location and write the supplied bytes to it.
     *
     * @param filePath target file path (e.g. /input/hello.txt)
     * @param contents file contents
     * @throws Exception
     */
    public static void createFile(String filePath, byte[] contents) throws Exception {
        FSDataOutputStream out = fileSystem.create(new Path(filePath));
        ByteArrayInputStream in = new ByteArrayInputStream(contents);
        // copyBytes with close=true closes both streams when the copy finishes
        IOUtils.copyBytes(in, out, 1024, true);
    }
    /**
     * Read the contents of an HDFS file.
     *
     * @param hdfsPath file to read (e.g. /input/hello.txt)
     * @return the file contents decoded as UTF-8
     * @throws Exception
     */
    public static String readFile(String hdfsPath) throws Exception {
        FSDataInputStream inputStream = fileSystem.open(new Path(hdfsPath));
        // Buffer the raw bytes before decoding: decoding each 1024-byte chunk
        // separately could split a multi-byte character across two reads.
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        IOUtils.copyBytes(inputStream, bos, 1024, true);
        return bos.toString("UTF-8");
    }
    /**
     * Print and return the block locations of a file.
     *
     * @param hdfsPath file to inspect (e.g. /output/readme.txt)
     * @throws IOException
     */
    public static BlockLocation[] getFileBlockLocation(String hdfsPath) throws IOException {
        Path path = new Path(hdfsPath);
        FileStatus fStatus = fileSystem.getFileStatus(path);
        // Query every block in the file, from offset 0 to its full length
        BlockLocation[] locations = fileSystem.getFileBlockLocations(fStatus, 0, fStatus.getLen());
        for (BlockLocation location : locations) {
            System.out.println(location);
        }
        return locations;
    }
    /**
     * Delete a directory or file in HDFS.
     *
     * @param hdfsPath path to delete
     * @return true if the deletion succeeded
     * @throws IOException
     */
    public static boolean delete(String hdfsPath) throws IOException {
        // The second argument enables recursive deletion for directories
        return fileSystem.delete(new Path(hdfsPath), true);
    }
    /**
     * Upload a local file to HDFS.
     *
     * @param hdfsPath  target directory in HDFS (e.g. "/input/")
     * @param localPath local source file (e.g. "d:/hello.txt")
     * @throws IOException
     */
    public static void uploadFile(String hdfsPath, String localPath) throws IOException {
        FileInputStream in = new FileInputStream(localPath);
        // Resolve the target file name inside the HDFS directory, so that
        // passing "/input/" uploads to "/input/hello.txt" instead of trying
        // to overwrite the directory itself.
        Path target = new Path(hdfsPath, new File(localPath).getName());
        FSDataOutputStream out = fileSystem.create(target, true);
        IOUtils.copyBytes(in, out, 1024, true);
    }
    /**
     * Download a file from HDFS to the local filesystem.
     *
     * @param hdfsPath  source file in HDFS (e.g. "/input/hello.txt")
     * @param localPath local target path (e.g. "d:/hello.txt")
     * @throws IOException
     */
    public static void downloadFile(String hdfsPath, String localPath) throws IOException {
        FSDataInputStream in = fileSystem.open(new Path(hdfsPath));
        FileOutputStream out = new FileOutputStream(localPath);
        IOUtils.copyBytes(in, out, 1024, true);
    }
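    /*
     * Minimal usage sketch (assumptions: the NameNode above is reachable and
     * the sample paths below are free to use; adjust them for your cluster).
     */
    public static void main(String[] args) throws Exception {
        mkdir("/input");                                   // create a directory
        createFile("/input/hello.txt", "hello hdfs".getBytes("UTF-8"));
        System.out.println(readFile("/input/hello.txt"));  // read it back
        list("/input");                                    // ls-style listing
        getFileBlockLocation("/input/hello.txt");          // block locations
        delete("/input");                                  // recursive cleanup
    }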
}