package cn.framelife.hadoop;
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
/**
 * Reads a file from HDFS using the FileSystem API and streams it to stdout.
 *
 * @author Pan Guangwei
 */
public class FileSystemCat {

    /**
     * Builds a Hadoop {@link Configuration} from the cluster resource files.
     *
     * @return a Configuration with core-site.xml and hdfs-site.xml added
     *         (resolved from the classpath / working directory)
     */
    public static Configuration getConf() {
        Configuration configuration = new Configuration();
        configuration.addResource(new Path("core-site.xml"));
        configuration.addResource(new Path("hdfs-site.xml"));
        return configuration;
    }

    /**
     * Opens the hard-coded HDFS path and copies its bytes to System.out.
     */
    public static void main(String[] args) {
        InputStream in = null;
        String url = "hdfs://namenode:9000/user/hadoop/hello1.txt";
        try {
            FileSystem fs = FileSystem.get(getConf());
            in = fs.open(new Path(url));
            // FIX: use a fixed 4 KB copy buffer. The original passed
            // in.available() as the buffer size, but available() only reports
            // the bytes readable without blocking — for a remote HDFS stream
            // it may be 0 or far less than the file length.
            IOUtils.copyBytes(in, System.out, 4096, false);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // closeStream is null-safe and swallows close() errors.
            IOUtils.closeStream(in);
        }
    }
}
package cn.framelife.hadoop;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;
/**
 * Reads a file from HDFS through a {@code hdfs://} URL via java.net.URL.
 *
 * @author Pan Guangwei
 */
public class URLCat {

    static {
        // Register Hadoop's stream handler so java.net.URL understands
        // hdfs:// URLs. setURLStreamHandlerFactory may only be called once
        // per JVM, so it lives in a static initializer.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    /**
     * Opens the hard-coded HDFS URL, reads it fully, and prints the content.
     */
    public static void main(String[] args) {
        InputStream in = null;
        String url = "hdfs://namenode:9000/user/hadoop/hello1.txt";
        try {
            in = new URL(url).openStream();
            // FIX: read until EOF. The original sized a byte[] with
            // in.available() and ignored read()'s return value; available()
            // only reports bytes readable without blocking, so a network
            // stream could yield a truncated or empty message.
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            byte[] chunk = new byte[4096];
            int n;
            while ((n = in.read(chunk)) != -1) {
                buffer.write(chunk, 0, n);
            }
            // FIX: decode with an explicit charset instead of the platform
            // default (HDFS text content here is assumed UTF-8 — confirm).
            String msg = buffer.toString("UTF-8");
            System.out.println("接收到的信息:" + msg);
        } catch (MalformedURLException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // closeStream is null-safe and swallows close() errors.
            IOUtils.closeStream(in);
        }
    }
}