package com.bpf.hdfs;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Before;
import org.junit.Test;
/* 用流的方式来操作HDFS上的文件
* 可以实现读取制定偏移量范围的数据
* */
/**
 * Stream-based access to files on HDFS.
 * Demonstrates upload, download, random (seek-based) reads, and printing
 * file contents to the console using raw streams rather than the
 * copyFromLocal/copyToLocal convenience APIs.
 */
public class HdfsStreamAccess {
    FileSystem fs = null;
    Configuration conf = null;

    /**
     * Initialize the connection to HDFS before each test.
     *
     * @throws Exception if the URI is malformed or the filesystem cannot be reached
     */
    @Before
    public void init() throws Exception {
        conf = new Configuration();
        fs = FileSystem.get(new URI("hdfs://Master:9000"), conf);
    }

    /**
     * Upload a local file to HDFS using streams.
     *
     * @throws IOException if either stream fails
     */
    @Test
    public void testUpload() throws IllegalArgumentException, IOException {
        // try-with-resources closes both streams even on failure
        // (the original leaked them; an unflushed/unclosed HDFS output
        // stream can leave the target file truncated or empty)
        try (FSDataOutputStream outputStream = fs.create(new Path("/bpf.txt"), true);
             FileInputStream inputStream = new FileInputStream("E:/QQPCmgr/Desktop/test.txt")) {
            IOUtils.copy(inputStream, outputStream);
        }
    }

    /**
     * Download a file from HDFS to the local filesystem using streams.
     * NOTE(review): method name has a typo ("tets" for "test"); kept as-is
     * to avoid changing the public interface — rename in a follow-up.
     *
     * @throws IOException if either stream fails
     */
    @Test
    public void tetsDownload() throws IllegalArgumentException, IOException {
        try (FSDataInputStream inputStream = fs.open(new Path("/bpf.txt"));
             FileOutputStream outputStream = new FileOutputStream("E:/QQPCmgr/Desktop/test1.txt")) {
            IOUtils.copy(inputStream, outputStream);
        }
    }

    /**
     * Read a byte range starting at a given offset: seek to byte 12, then
     * copy everything from the 13th byte onward to a local file.
     *
     * @throws IOException if either stream fails
     */
    @Test
    public void testRandomAccess() throws IllegalArgumentException, IOException {
        try (FSDataInputStream inputStream = fs.open(new Path("/bpf.txt"));
             FileOutputStream outputStream = new FileOutputStream("E:/QQPCmgr/Desktop/test2.txt")) {
            // start reading at the 13th byte (offset 12, zero-based)
            inputStream.seek(12);
            IOUtils.copy(inputStream, outputStream);
        }
    }

    /**
     * Read a file from HDFS and print its contents to the console.
     *
     * @throws IOException if the HDFS stream fails
     */
    @Test
    public void testCat() throws IllegalArgumentException, IOException {
        // only the HDFS stream goes in try-with-resources;
        // System.out must NOT be closed
        try (FSDataInputStream inputStream = fs.open(new Path("/bpf.txt"))) {
            IOUtils.copy(inputStream, System.out);
        }
    }
}
// Source article: "Operating HDFS files with streams — Java API"
// (latest related post published 2022-11-09 11:38:03)