import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Before;
import org.junit.Test;
public class Read {

    FileSystem fs = null;

    @Before
    public void init() throws Exception {
        Configuration conf = new Configuration();
        conf.set("dfs.replication", "2");
        conf.set("dfs.blocksize", "128m");
        // FileSystem.get is a static factory; its result must be assigned to fs,
        // otherwise fs stays null and every test fails with a NullPointerException
        fs = FileSystem.get(new URI("hdfs://hadoop1:9000"), conf, "root");
    }
    /*
     * Read the contents of a file stored in HDFS
     */
    @Test
    public void testReadData() throws Exception {
        FSDataInputStream in = fs.open(new Path("/name.txt"));
        BufferedReader br = new BufferedReader(new InputStreamReader(in, "utf-8"));
        String line = null;
        while ((line = br.readLine()) != null) {
            System.out.println(line);
        }
        // Closing the reader also closes the wrapped FSDataInputStream
        br.close();
        fs.close();
    }
    /*
     * Read a byte range at a given offset in an HDFS file
     */
    @Test
    public void testReadSeekData() throws Exception {
        FSDataInputStream in = fs.open(new Path("/name.txt"));
        // Position the stream at the starting offset
        in.seek(12);
        // Read exactly 16 bytes into the buffer; readFully blocks until all 16 arrive
        byte[] buf = new byte[16];
        in.readFully(buf);
        System.out.println(new String(buf, "utf-8"));
        in.close();
        fs.close();
    }
}
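
For a straight copy of a file's contents to the console, Hadoop's own org.apache.hadoop.io.IOUtils.copyBytes can replace the BufferedReader loop. Below is a minimal standalone sketch, not the only way to do it, assuming the same NameNode address hdfs://hadoop1:9000, user root, and file /name.txt as the tests above:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class ReadWithIOUtils {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(new URI("hdfs://hadoop1:9000"), conf, "root");
             FSDataInputStream in = fs.open(new Path("/name.txt"))) {
            // Copy the stream to stdout in 4096-byte chunks;
            // false = copyBytes does not close the streams itself
            IOUtils.copyBytes(in, System.out, 4096, false);
        }
    }
}

try-with-resources closes the input stream and the FileSystem in reverse order, so the explicit close() calls in the tests above become unnecessary.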
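
The seek-based test moves the stream's file pointer before reading. FSDataInputStream also implements PositionedReadable, whose readFully(position, buffer) fetches a byte range without moving that pointer, so one open stream can serve several threads. A sketch using the same offset 12 and 16-byte length as testReadSeekData:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class PositionedRead {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(new URI("hdfs://hadoop1:9000"), conf, "root");
             FSDataInputStream in = fs.open(new Path("/name.txt"))) {
            byte[] buf = new byte[16];
            // Positioned read: does not move the stream's own file pointer and
            // throws EOFException if fewer than 16 bytes remain after offset 12
            in.readFully(12, buf);
            System.out.println(new String(buf, "utf-8"));
        }
    }
}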