2020-10-08
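
Two Hadoop SequenceFile demos follow: the first writes and reads a SequenceFile on the local file system; the second packs a directory of small HDFS files into a single SequenceFile and unpacks it again.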

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

import java.io.IOException;

public class SquenceFileDemo {

    private static final String[] DATA = {
      "One,two,buckle my shoe",
      "Three,four,shut the dooe",
      "Hello hadiiop"
    };

    public static void createSequenceFile(String filePath){
        Configuration conf = new Configuration();
        Path path = new Path(filePath);

        IntWritable key = new IntWritable();
        Text value = new Text();
        // 3. Create the SequenceFile.Writer object
        try (SequenceFile.Writer writer = SequenceFile.createWriter(conf,
                SequenceFile.Writer.file(path),
                SequenceFile.Writer.keyClass(key.getClass()),
                SequenceFile.Writer.valueClass(value.getClass()))) {

            for (int i = 0; i < 100; i++) {
                key.set(100 - i);
                value.set(DATA[i % DATA.length]);

                // 4. Append the key-value pair to the end of the file
                writer.append(key, value);
            }

        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    public static void readSequenceFile(String filePath) {
        Configuration conf = new Configuration();
        Path path = new Path(filePath);

        try (SequenceFile.Reader reader =
                new SequenceFile.Reader(conf, SequenceFile.Reader.file(path))) {

            IntWritable key = new IntWritable();
            Text value = new Text();

            while (reader.next(key, value)) {
                System.out.printf("%s\t%s\n", key.toString(), value.toString());
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    public static void main(String[] args) {
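        // Run createSequenceFile first to generate the file, then read it back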
        //createSequenceFile("d:/file.sequence");
        readSequenceFile("d:/file.sequence");
    }
}
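
The second demo works against HDFS: it walks a directory, stores every file in one SequenceFile (the file path as the key, the file bytes as the value), and then restores the files from that SequenceFile.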
package chapter07.compress;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

import java.io.File;
import java.io.IOException;
import java.net.URI;

import static org.apache.hadoop.io.SequenceFile.*;
import static org.apache.hadoop.io.SequenceFile.Reader.*;

public class SquenceFileDemo1 {
    public static void createSequenceFile(String dirPath,String seqPath){
        Configuration conf = new Configuration();
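        // Point the client at the HDFS NameNode and access HDFS as the "niit" user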
        conf.set("fs.defaultFS","hdfs://node-1:9000");
        System.setProperty("HADOOP_USER_NAME","niit");

        Text key = new Text();
        BytesWritable value = new BytesWritable();

        Path path = new Path(seqPath);
        // Declare the FileSystem first so it is closed after the writer
        try (FileSystem fs = FileSystem.get(conf);
             Writer writer = createWriter(conf,
                     Writer.file(path),
                     Writer.keyClass(key.getClass()),
                     Writer.valueClass(value.getClass())))
        {
            RemoteIterator<LocatedFileStatus> iterator = fs.listFiles(new Path(dirPath), false);
            while (iterator.hasNext()) {
                LocatedFileStatus fileStatus = iterator.next();
                // Use the file's full path as the key
                key.set(fileStatus.getPath().toString());

                // Read the whole file into memory and use its bytes as the value
                byte[] buf = new byte[(int) fileStatus.getLen()];
                try (FSDataInputStream dis = fs.open(fileStatus.getPath())) {
                    IOUtils.readFully(dis, buf, 0, buf.length);
                }
                value.set(buf, 0, buf.length);

                writer.append(key, value);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void readSequenceFile(String dstPath,String seqPath){
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS","hdfs://node-1:9000");
        System.setProperty("HADOOP_USER_NAME","niit");

        Path path = new Path(seqPath);

        try (FileSystem fs = FileSystem.get(conf);
             Reader reader = new Reader(conf, Reader.file(path))) {
            Text key = new Text();
            BytesWritable value = new BytesWritable();
            while (reader.next(key, value)) {
                // Restore each record under dstPath, keeping only the file name,
                // and close the stream so the data is flushed to HDFS
                Path dst = new Path(dstPath, new Path(key.toString()).getName());
                try (FSDataOutputStream dos = fs.create(dst)) {
                    dos.write(value.copyBytes());
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        createSequenceFile("/logs","/logs.seqence");
        readSequenceFile("/","/logs.seqence");
    }
}
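
This class lives in chapter07.compress, but the writer above stores its records uncompressed. SequenceFile.createWriter also accepts a compression option; a minimal sketch of enabling it (the BLOCK/Gzip choice here is an assumption, not part of the original demo):

import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.compress.GzipCodec;

// Same writer options as above, plus block compression (assumed codec choice)
SequenceFile.Writer writer = SequenceFile.createWriter(conf,
        SequenceFile.Writer.file(path),
        SequenceFile.Writer.keyClass(key.getClass()),
        SequenceFile.Writer.valueClass(value.getClass()),
        // BLOCK compresses batches of records together, usually the best ratio
        SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK, new GzipCodec()));

SequenceFile supports NONE, RECORD, and BLOCK compression; the reader detects the codec from the file header, so readSequenceFile needs no changes.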
This is an SQL statement that inserts data into the borrowing table. The table contains the following fields: borrow ID, reader ID, book ID, borrow date, return date, and borrow status. Each row represents one borrowing record. The borrow ID, reader ID, book ID, borrow date, and borrow status are required; the return date is optional, but must be filled in when the borrow status is "returned" (已还). The rows to insert are:

- Borrow ID: 100001, reader ID: 123413, book ID: 0001, borrow date: 2020-11-05, return date: NULL, status: borrowed (借阅)
- Borrow ID: 100002, reader ID: 223411, book ID: 0002, borrow date: 2020-9-28, return date: 2020-10-13, status: returned (已还)
- Borrow ID: 100003, reader ID: 321123, book ID: 1001, borrow date: 2020-7-01, return date: NULL, status: overdue (过期)
- Borrow ID: 100004, reader ID: 321124, book ID: 2001, borrow date: 2020-10-09, return date: 2020-10-14, status: returned (已还)
- Borrow ID: 100005, reader ID: 321124, book ID: 0001, borrow date: 2020-10-15, return date: NULL, status: borrowed (借阅)
- Borrow ID: 100006, reader ID: 223411, book ID: 2001, borrow date: 2020-10-16, return date: NULL, status: borrowed (借阅)
- Borrow ID: 100007, reader ID: 411111, book ID: 1002, borrow date: 2020-9-01, return date: 2020-9-24, status: returned (已还)
- Borrow ID: 100008, reader ID: 411111, book ID: 0001, borrow date: 2020-9-25, return date: NULL, status: borrowed (借阅)
- Borrow ID: 100009, reader ID: 411111, book ID: 1001, borrow date: 2020-10-08, return date: NULL, status: borrowed (借阅)
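
A sketch of that INSERT statement, reconstructed from the description above; the table and column names (borrow_record, borrow_id, reader_id, book_id, borrow_date, return_date, status) are assumptions, since only the field meanings are given:

-- Table/column names are assumed; status values are kept as in the original data
INSERT INTO borrow_record
    (borrow_id, reader_id, book_id, borrow_date, return_date, status)
VALUES
    (100001, '123413', '0001', '2020-11-05', NULL,         '借阅'),
    (100002, '223411', '0002', '2020-09-28', '2020-10-13', '已还'),
    (100003, '321123', '1001', '2020-07-01', NULL,         '过期'),
    (100004, '321124', '2001', '2020-10-09', '2020-10-14', '已还'),
    (100005, '321124', '0001', '2020-10-15', NULL,         '借阅'),
    (100006, '223411', '2001', '2020-10-16', NULL,         '借阅'),
    (100007, '411111', '1002', '2020-09-01', '2020-09-24', '已还'),
    (100008, '411111', '0001', '2020-09-25', NULL,         '借阅'),
    (100009, '411111', '1001', '2020-10-08', NULL,         '借阅');

Reader and book IDs are quoted as strings so that leading zeros (e.g. 0001) survive.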