Hadoop: calling HDFS through the Java API, and Hadoop RPC

Calling HDFS through the Java API

package hadoop.hdfs;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Before;
import org.junit.Test;

public class HDFSDemo {

	private FileSystem fs = null;

	@Before
	public void init() throws IOException, URISyntaxException, InterruptedException {
		// Connect to the NameNode as user "root"
		fs = FileSystem.get(new URI("hdfs://192.168.23.127:9000"), new Configuration(), "root");
	}

	@Test
	public void testDel() throws IllegalArgumentException, IOException {
		// Delete /words.txt; the second argument enables recursive deletion
		boolean flag = fs.delete(new Path("/words.txt"), true);
		System.out.println(flag);
	}

	@Test
	public void testMkdir() throws IllegalArgumentException, IOException {
		// Create a directory in HDFS
		boolean flag = fs.mkdirs(new Path("/itcast88888888"));
		System.out.println(flag);
	}

	@Test
	public void testUpload() throws IllegalArgumentException, IOException {
		// Upload the local file c:/w.txt to /words.txt in HDFS
		FSDataOutputStream out = fs.create(new Path("/words.txt"));
		FileInputStream in = new FileInputStream(new File("c:/w.txt"));
		// The final "true" closes both streams once the copy finishes
		IOUtils.copyBytes(in, out, 2048, true);
	}

	public static void main(String[] args) throws IOException, URISyntaxException {
		// Download /jdk.avi from HDFS to the local file c:/jdk123456
		FileSystem fs = FileSystem.get(new URI("hdfs://192.168.23.127:9000"), new Configuration());
		InputStream in = fs.open(new Path("/jdk.avi"));
		FileOutputStream out = new FileOutputStream(new File("c:/jdk123456"));
		IOUtils.copyBytes(in, out, 2048, true);
	}

}
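
Beyond delete, mkdir, upload, and download, the same FileSystem handle also covers directory listings and the copyFromLocalFile/copyToLocalFile convenience methods, which avoid the manual stream copy shown above. The sketch below is a minimal illustration along those lines: it reuses the NameNode address from the demo, but the class name and the local/remote paths are placeholders you would adapt to your own environment.

package hadoop.hdfs;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSListDemo {

	public static void main(String[] args)
			throws IOException, URISyntaxException, InterruptedException {
		// Same NameNode address and user as in HDFSDemo above
		FileSystem fs = FileSystem.get(new URI("hdfs://192.168.23.127:9000"),
				new Configuration(), "root");

		// Print the entries directly under the root directory
		FileStatus[] statuses = fs.listStatus(new Path("/"));
		for (FileStatus status : statuses) {
			System.out.println((status.isDirectory() ? "d " : "- ")
					+ status.getPath() + "  " + status.getLen());
		}

		// One-call download; source and target paths here are placeholders
		fs.copyToLocalFile(new Path("/words.txt"), new Path("c:/words-copy.txt"));

		fs.close();
	}

}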

Hadoop RPC

Bizable.java

package hadoop.rpc;

public interface Bizable {
	// Hadoop RPC uses this version number to check that client and server agree on the protocol
	public static final long versionID = 100000;

	public String sayHi(String name);
}

RPCServer.java

package hadoop.rpc;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RPC.Builder;
import org.apache.hadoop.ipc.RPC.Server;

public class RPCServer implements Bizable {
	
	public String sayHi(String name){
		return "Hi~ "+name;
	}
	
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		// 192.168.199.211 is the IP address of the machine running this server
		Server server = new RPC.Builder(conf)
				.setProtocol(Bizable.class)
				.setInstance(new RPCServer())
				.setBindAddress("192.168.199.211")
				.setPort(9000)
				.build();
		server.start();
	}

}

RPCClient.java

package hadoop.rpc;

import java.io.IOException;
import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;

public class RPCClient {

	public static void main(String[] args) throws IOException {
		// The client version passed to getProxy() must match Bizable.versionID,
		// otherwise the server rejects the call with a version mismatch
		Bizable proxy = RPC.getProxy(Bizable.class, Bizable.versionID,
				new InetSocketAddress("192.168.199.211", 9000), new Configuration());
		String result = proxy.sayHi("tomcat");
		System.out.println(result);
		RPC.stopProxy(proxy);
	}

}
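
To try it out, start RPCServer first and then run RPCClient: the proxy call travels over the network to the server process, the server executes its local sayHi() implementation and returns the string, and the client prints Hi~ tomcat before releasing the connection with RPC.stopProxy().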

At its core, Hadoop RPC is just a wrapper around sockets; the approach is a bit crude, but it is simple and direct.
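
To make that point concrete, here is a toy sketch of the same idea outside of Hadoop: a JDK dynamic proxy ships each method call over a plain socket, and the server invokes a local implementation and writes the result back. Everything in it (the Greeting interface, the MiniRpcDemo class, port 9100, Java serialization as the wire format) is invented for illustration; Hadoop's real implementation is different, but the overall shape of RPC.getProxy() and RPC.Builder is the same.

package hadoop.rpc;

import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.net.ServerSocket;
import java.net.Socket;

public class MiniRpcDemo {

	// Same protocol shape as Bizable: one method taking and returning a String
	public interface Greeting {
		String sayHi(String name);
	}

	// Server side: accept one connection, read (methodName, argument), invoke locally, return the result
	static void serve(int port, Greeting impl) throws Exception {
		try (ServerSocket listener = new ServerSocket(port);
		     Socket conn = listener.accept()) {
			// Create and flush the output stream first so the peer's ObjectInputStream does not block
			ObjectOutputStream out = new ObjectOutputStream(conn.getOutputStream());
			out.flush();
			ObjectInputStream in = new ObjectInputStream(conn.getInputStream());

			String methodName = (String) in.readObject();
			String arg = (String) in.readObject();
			Method method = Greeting.class.getMethod(methodName, String.class);
			out.writeObject(method.invoke(impl, arg));
			out.flush();
		}
	}

	// Client side: a dynamic proxy that forwards every call over a socket, like RPC.getProxy()
	static Greeting proxy(String host, int port) {
		return (Greeting) Proxy.newProxyInstance(
				Greeting.class.getClassLoader(),
				new Class<?>[]{Greeting.class},
				(p, method, args) -> {
					try (Socket socket = new Socket(host, port)) {
						ObjectOutputStream out = new ObjectOutputStream(socket.getOutputStream());
						out.flush();
						ObjectInputStream in = new ObjectInputStream(socket.getInputStream());
						out.writeObject(method.getName());
						out.writeObject(args[0]);
						out.flush();
						return in.readObject();
					}
				});
	}

	public static void main(String[] args) throws Exception {
		// Run server and client in the same process purely for the demonstration
		Thread server = new Thread(() -> {
			try {
				serve(9100, name -> "Hi~ " + name);
			} catch (Exception e) {
				e.printStackTrace();
			}
		});
		server.start();
		Thread.sleep(500); // crude wait for the server socket to be ready

		System.out.println(proxy("127.0.0.1", 9100).sayHi("tomcat")); // prints: Hi~ tomcat
		server.join();
	}

}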


