在Hadoop集群中编译并运行《权威指南》中的例3.2
Java文件代码如下
package com.changtu;
import java.io.InputStream;
import java.net.URL;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;
/**
 * Dumps an HDFS file to standard output through the plain {@code java.net.URL}
 * API (Hadoop: The Definitive Guide, the URLCat example).
 */
public class Example3_1 {
    /** HDFS file to print; must be reachable from the machine running this class. */
    private static final String INPUT_URL =
            "hdfs://192.168.152.128:9000/user/hadoop/input/test1.txt";

    static {
        // Teach java.net.URL the hdfs:// scheme. setURLStreamHandlerFactory
        // may be invoked at most once per JVM, hence the static initializer.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    public static void main(String[] args) throws Exception {
        InputStream stream = null;
        try {
            stream = new URL(INPUT_URL).openStream();
            // 4096-byte copy buffer; 'false' leaves both streams open so the
            // finally block below remains responsible for closing them.
            IOUtils.copyBytes(stream, System.out, 4096, false);
        } finally {
            // Quietly closes the stream, ignoring nulls and close errors.
            IOUtils.closeStream(stream);
        }
    }
}
数据文件的读写
import java.io.File;
import java.io.FileInputStream;
import java.io.OutputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
/**
 * Uploads a slice of a local file into HDFS: skips the first 100 bytes of the
 * local source, then copies up to the next 20 bytes into a new HDFS file,
 * printing a "." as the client reports write progress.
 */
public class HomeWork2_2 {
    public static void main(String[] args) throws Exception {
        String localSrc = "/home/hadoop/soft/hadoop_examples/homework2_2.txt";
        String dst = "hdfs://192.168.152.128:9000/user/hadoop/input/homework2_2.txt";
        FileInputStream in = null;
        OutputStream out = null;
        Configuration conf = new Configuration();
        try {
            in = new FileInputStream(new File(localSrc));
            FileSystem fs = FileSystem.get(URI.create(dst), conf);
            out = fs.create(new Path(dst), new Progressable() {
                @Override
                public void progress() {
                    // Invoked by the HDFS client as data is flushed to the cluster.
                    System.out.println(".");
                }
            });
            // NOTE(review): skip() may skip fewer than 100 bytes on a short
            // file; the read below then just starts earlier — confirm the
            // source is always >= 120 bytes if the exact offset matters.
            in.skip(100);
            byte[] buffer = new byte[20];
            int read = in.read(buffer);
            // BUG FIX: the original wrote all 20 bytes of the buffer even when
            // read < 20 (or -1 at EOF), padding the HDFS file with zero bytes.
            // Only write the bytes actually read, and nothing at EOF.
            if (read > 0) {
                out.write(buffer, 0, read);
            }
        } finally {
            IOUtils.closeStream(in);
            IOUtils.closeStream(out);
        }
    }
}
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
/**
 * Downloads a slice of an HDFS file to the local filesystem: skips the first
 * 100 bytes of the HDFS source, then copies up to the next 20 bytes into a
 * local file.
 */
public class HomeWork2_3 {
    public static void main(String[] args) throws Exception {
        String dfsSrc = "hdfs://192.168.152.128:9000/user/hadoop/input/homework2_3.txt";
        String dst = "/home/hadoop/soft/hadoop_examples/homework2_3.txt";
        FSDataInputStream in = null;
        OutputStream out = null;
        Configuration conf = new Configuration();
        try {
            FileSystem fs = FileSystem.get(URI.create(dfsSrc), conf);
            in = fs.open(new Path(dfsSrc));
            out = new FileOutputStream(new File(dst));
            // NOTE(review): skip() may skip fewer than 100 bytes on a short
            // file — confirm the HDFS source is always >= 120 bytes if the
            // exact offset matters.
            in.skip(100);
            byte[] buffer = new byte[20];
            int read = in.read(buffer);
            // BUG FIX: the original wrote all 20 bytes of the buffer even when
            // read < 20 (or -1 at EOF), padding the local file with zero bytes.
            // Only write the bytes actually read, and nothing at EOF.
            if (read > 0) {
                out.write(buffer, 0, read);
            }
        } finally {
            IOUtils.closeStream(in);
            IOUtils.closeStream(out);
        }
    }
}

本文详细介绍了如何在Hadoop集群中编译并运行《权威指南》中的例3.2 Java代码,并通过实例演示了如何在本地与HDFS之间读写数据文件。
1894

被折叠的评论
为什么被折叠?



