【MapReduce】Learning, Part 1

This post walks through compiling and running the Example 3.2 Java code from *Hadoop: The Definitive Guide* on a Hadoop cluster, and demonstrates how to read and write data files between the local filesystem and HDFS.

Compiling and running Example 3.2 from *The Definitive Guide* on the Hadoop cluster

The Java source file is as follows:
package com.changtu;

import java.io.InputStream;
import java.net.URL;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;

public class Example3_1 {
	static {
		// Register Hadoop's handler so java.net.URL understands the hdfs:// scheme.
		// Note: setURLStreamHandlerFactory can only be called once per JVM.
		URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
	}

	public static void main(String[] args) throws Exception {
		InputStream in = null;
		try {
			// Open the HDFS file through a URL and copy its contents to standard output.
			in = new URL("hdfs://192.168.152.128:9000/user/hadoop/input/test1.txt").openStream();
			IOUtils.copyBytes(in, System.out, 4096, false);
		} finally {
			IOUtils.closeStream(in);
		}
	}
}
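
For comparison, the same file can also be read through the FileSystem API instead of java.net.URL, which avoids the one-time URL stream handler registration. A minimal sketch, reusing the NameNode address and input path from the example above (the class name here is chosen for illustration):

import java.io.InputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class FileSystemCatSketch {
    public static void main(String[] args) throws Exception {
        // Same file as in the URL-based example above.
        String uri = "hdfs://192.168.152.128:9000/user/hadoop/input/test1.txt";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        InputStream in = null;
        try {
            // open() returns an FSDataInputStream; copy it to standard output.
            in = fs.open(new Path(uri));
            IOUtils.copyBytes(in, System.out, 4096, false);
        } finally {
            IOUtils.closeStream(in);
        }
    }
}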

Reading and writing data files

The first program below writes a slice of a local file into HDFS:

import java.io.File;
import java.io.FileInputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;

public class HomeWork2_2 {
    public static void main(String[] args) throws Exception {
        String localSrc = "/home/hadoop/soft/hadoop_examples/homework2_2.txt";
        String dst = "hdfs://192.168.152.128:9000/user/hadoop/input/homework2_2.txt";

        FileInputStream in = null;
        OutputStream out = null;
        Configuration conf = new Configuration();

        try {
            in = new FileInputStream(new File(localSrc));
            FileSystem fs = FileSystem.get(URI.create(dst), conf);
            // create() reports upload progress through the Progressable callback.
            out = fs.create(new Path(dst), new Progressable() {
                @Override
                public void progress() {
                    System.out.println(".");
                }
            });

            // Skip the first 100 bytes of the local file, then copy the next 20 bytes to HDFS.
            in.skip(100);
            byte[] newStr = new byte[20];
            int read = in.read(newStr);
            if (read > 0) {
                out.write(newStr, 0, read); // write only the bytes actually read
            }
        } finally {
            IOUtils.closeStream(in);
            IOUtils.closeStream(out);
        }
    }
}
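
To upload the entire file rather than a 20-byte slice, the FileSystem API also provides copyFromLocalFile. A minimal sketch using the same paths as HomeWork2_2 (the class name here is chosen for illustration):

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyLocalToHdfsSketch {
    public static void main(String[] args) throws Exception {
        // Same local source and HDFS destination as HomeWork2_2 above.
        String localSrc = "/home/hadoop/soft/hadoop_examples/homework2_2.txt";
        String dst = "hdfs://192.168.152.128:9000/user/hadoop/input/homework2_2.txt";

        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        // copyFromLocalFile uploads the whole local file to the HDFS destination.
        fs.copyFromLocalFile(new Path(localSrc), new Path(dst));
        fs.close();
    }
}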

The second program reads a slice of an HDFS file and writes it to the local filesystem:

import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class HomeWork2_3 {
	public static void main(String[] args) throws Exception {
		String dfsSrc = "hdfs://192.168.152.128:9000/user/hadoop/input/homework2_3.txt";
		String dst = "/home/hadoop/soft/hadoop_examples/homework2_3.txt";

		FSDataInputStream in = null;
		OutputStream out = null;
		Configuration conf = new Configuration();

		try {
			FileSystem fs = FileSystem.get(URI.create(dfsSrc), conf);
			in = fs.open(new Path(dfsSrc));
			out = new FileOutputStream(new File(dst));

			// Skip the first 100 bytes of the HDFS file, then copy the next 20 bytes locally.
			in.skip(100);
			byte[] newStr = new byte[20];
			int read = in.read(newStr);
			if (read > 0) {
				out.write(newStr, 0, read); // write only the bytes actually read
			}
		} finally {
			IOUtils.closeStream(in);
			IOUtils.closeStream(out);
		}
	}
}
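
Similarly, downloading the whole HDFS file can be done with copyToLocalFile. A minimal sketch using the same paths as HomeWork2_3 (the class name here is chosen for illustration):

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyHdfsToLocalSketch {
    public static void main(String[] args) throws Exception {
        // Same HDFS source and local destination as HomeWork2_3 above.
        String dfsSrc = "hdfs://192.168.152.128:9000/user/hadoop/input/homework2_3.txt";
        String dst = "/home/hadoop/soft/hadoop_examples/homework2_3.txt";

        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dfsSrc), conf);
        // copyToLocalFile downloads the whole HDFS file to the local destination.
        fs.copyToLocalFile(new Path(dfsSrc), new Path(dst));
        fs.close();
    }
}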

