import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import java.io.IOException;
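
/**
 * Writes 100 IntWritable/Text key-value pairs to a SequenceFile and reads them
 * back, printing each record; with the default Configuration the path resolves
 * to the local file system.
 */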
public class SequenceFileDemo {

    private static final String[] DATA = {
            "One,two,buckle my shoe",
            "Three,four,shut the door",
            "Hello hadoop"
    };
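
    /**
     * Creates a SequenceFile at filePath and appends 100 records whose keys
     * count down from 100 and whose values cycle through the DATA strings.
     */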
    public static void createSequenceFile(String filePath) {
        Configuration conf = new Configuration();
        Path path = new Path(filePath);
        IntWritable key = new IntWritable();
        Text value = new Text();
        // Writer options: the last option must set the value class, not the key class.
        try (SequenceFile.Writer writer = SequenceFile.createWriter(conf,
                SequenceFile.Writer.file(path),
                SequenceFile.Writer.keyClass(key.getClass()),
                SequenceFile.Writer.valueClass(value.getClass()))) {
            for (int i = 0; i < 100; i++) {
                key.set(100 - i);
                value.set(DATA[i % DATA.length]);
                writer.append(key, value);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
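
    /**
     * Opens the SequenceFile at filePath and prints every key-value pair,
     * one tab-separated record per line.
     */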
    public static void readSequenceFile(String filePath) {
        Configuration conf = new Configuration();
        Path path = new Path(filePath);
        try (SequenceFile.Reader reader =
                     new SequenceFile.Reader(conf, SequenceFile.Reader.file(path))) {
            IntWritable key = new IntWritable();
            Text value = new Text();
            while (reader.next(key, value)) {
                System.out.printf("%s\t%s\n", key, value);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    public static void main(String[] args) {
        createSequenceFile("d:/file.sequence");
        readSequenceFile("d:/file.sequence");
    }
}
package chapter07.compress;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import java.io.IOException;
import static org.apache.hadoop.io.SequenceFile.*;
import static org.apache.hadoop.io.SequenceFile.Reader.*;
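
/**
 * Packs every file in an HDFS directory into a single SequenceFile
 * (file path as the key, raw file bytes as the value), then unpacks it again.
 * The NameNode address (hdfs://node-1:9000) and user name are hard-coded below.
 */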
public class SequenceFileDemo1 {
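
    /**
     * Walks the files directly under dirPath on HDFS and appends each one to a
     * SequenceFile at seqPath, keyed by its full path with its bytes as the value.
     */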
    public static void createSequenceFile(String dirPath, String seqPath) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://node-1:9000");
        System.setProperty("HADOOP_USER_NAME", "niit");
        Text key = new Text();
        BytesWritable value = new BytesWritable();
        Path path = new Path(seqPath);
        // Open the FileSystem before the Writer so the Writer is closed first;
        // otherwise writer.close() would run against an already-closed FileSystem.
        try (FileSystem fs = FileSystem.get(conf);
             Writer writer = createWriter(conf,
                     Writer.file(path),
                     Writer.keyClass(key.getClass()),
                     Writer.valueClass(value.getClass()))) {
            RemoteIterator<LocatedFileStatus> iterator = fs.listFiles(new Path(dirPath), false);
            while (iterator.hasNext()) {
                LocatedFileStatus fileStatus = iterator.next();
                key.set(fileStatus.getPath().toString());
                byte[] buf = new byte[(int) fileStatus.getLen()];
                try (FSDataInputStream dis = fs.open(fileStatus.getPath())) {
                    IOUtils.readFully(dis, buf, 0, buf.length);
                }
                value.set(buf, 0, buf.length);
                writer.append(key, value);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
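
    /**
     * Reads every record from the SequenceFile at seqPath and writes each value
     * back out as a separate file under dstPath, keeping the original file name.
     */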
    public static void readSequenceFile(String dstPath, String seqPath) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://node-1:9000");
        System.setProperty("HADOOP_USER_NAME", "niit");
        Path path = new Path(seqPath);
        try (FileSystem fs = FileSystem.get(conf);
             Reader reader = new Reader(conf, Reader.file(path))) {
            Text key = new Text();
            BytesWritable value = new BytesWritable();
            while (reader.next(key, value)) {
                // The key holds the original file path; recreate the file under dstPath
                // and close the output stream once its bytes are written.
                Path outPath = new Path(dstPath, new Path(key.toString()).getName());
                try (FSDataOutputStream dos = fs.create(outPath)) {
                    dos.write(value.copyBytes());
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    public static void main(String[] args) {
        createSequenceFile("/logs", "/logs.sequence");
        readSequenceFile("/", "/logs.sequence");
    }
}