Hadoop RPC是Hadoop的一个重要组成部分,提供分布式环境下的远程对象调用功能。它主要通过Java的动态代理(Dynamic Proxy)与反射(Reflection)实现:代理类由java.lang.reflect.Proxy类在运行时根据协议接口动态生成,并结合java.lang.reflect.InvocationHandler来处理客户端的请求。下面给出使用Hadoop RPC的完整过程:
1. 代码:
(1)协议 - MyProtocol.java
import org.apache.hadoop.ipc.VersionedProtocol;
/**
 * RPC protocol shared by server and client.
 * The server implements it; the client obtains a proxy for it via
 * RPC.waitForProxy and invokes these methods remotely.
 */
public interface MyProtocol extends VersionedProtocol {
// Protocol version; client and server must agree on this value
// (the client passes it to RPC.waitForProxy).
public static final long versionID = 1L;
// Prints the string on the server side and returns "Hello " + string.
public String println(String string);
// Returns the product a * b.
public int mul(int a, int b);
// Sorts the first len elements of nums in ascending order and returns the array.
public int[] sortGreat(int[] nums, int len);
}
(2)服务器 - MyServer.java
import java.io.IOException;
import java.net.UnknownHostException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.ProtocolSignature;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RPC.Server;
/**
 * RPC server that implements {@link MyProtocol}.
 * The constructor builds, starts, and joins the Hadoop RPC server on
 * localhost:9999; it blocks until the server terminates.
 */
public class MyServer implements MyProtocol {
    private Server server;

    public MyServer() {
        try {
            Configuration conf = new Configuration();
            /* bindAddress and port are the server's host and listening port;
             * numHandlers is the number of server-side threads handling requests.
             */
            server = new RPC.Builder(conf)
                    // Register the protocol INTERFACE, not the implementation class:
                    // the client looks the protocol up by interface
                    // (RPC.waitForProxy(MyProtocol.class, ...)), so registering
                    // MyServer.class here would make that lookup fail.
                    .setProtocol(MyProtocol.class).setInstance(this)
                    .setBindAddress("localhost").setPort(9999).setNumHandlers(5)
                    .build();
            server.start();
            System.out.println("Server is running: "
                    + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()));
            server.join(); // blocks until the server shuts down
        } catch (UnknownHostException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }

    /** Overridden method: returns the version of the protocol this server speaks. */
    @Override
    public long getProtocolVersion(String protocol, long clientVersion) throws IOException {
        return MyProtocol.versionID;
    }

    /** Overridden method: returns the signature used for protocol compatibility checks. */
    @Override
    public ProtocolSignature getProtocolSignature(String arg0, long arg1,
            int arg2) throws IOException {
        return new ProtocolSignature(MyProtocol.versionID, null);
    }

    /** Echoes the received string and returns it prefixed with "Hello ". */
    @Override
    public String println(String t) {
        System.out.println("receive: " + t);
        return "Hello " + t;
    }

    /** Returns the product of the two received operands. */
    @Override
    public int mul(int a, int b) {
        System.out.println("receive: " + a + " " + b);
        return a * b;
    }

    /**
     * Sorts the first {@code len} elements of {@code nums} in ascending order
     * (in place) and returns the array.
     */
    @Override
    public int[] sortGreat(int[] nums, int len) {
        System.out.print("receive: ");
        for (int i = 0; i < len; i++) {
            System.out.print(nums[i] + " ");
        }
        System.out.println();
        // Use the library sort instead of a hand-rolled O(n^2) exchange sort.
        Arrays.sort(nums, 0, len);
        return nums;
    }

    public static void main(String[] args) {
        new MyServer();
    }
}
(3)客户端
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;
/**
 * RPC client for {@link MyProtocol}.
 * Connects to the server on localhost:9999, then interactively exercises
 * the three remote methods (echo, multiply, sort) from stdin.
 */
public class MyClient {
    private MyProtocol proxy;

    public MyClient() {
        InetSocketAddress addr = new InetSocketAddress("localhost", 9999);
        try {
            // Build the client-side proxy; remote methods are then invoked
            // through it as if they were local calls.
            proxy = (MyProtocol) RPC.waitForProxy(MyProtocol.class, MyProtocol.versionID, addr, new Configuration());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Returns the RPC proxy, or null if the connection could not be made. */
    public MyProtocol getProxy() {
        return proxy;
    }

    /** Releases the proxy's underlying connection. */
    public void close() {
        RPC.stopProxy(proxy);
    }

    public static void main(String[] args) {
        MyClient c = new MyClient();
        // Guard against the constructor having failed (proxy left null);
        // without this check every call below would throw NPE.
        if (c.getProxy() == null) {
            System.err.println("Could not connect to RPC server.");
            return;
        }
        Scanner cin = new Scanner(System.in);
        //字符串
        System.out.println("字符串:");
        System.out.print("send: ");
        String str = cin.next();
        String strAns = c.getProxy().println(str);
        System.out.println("receive: " + strAns);
        System.out.println();
        //相乘
        System.out.println("相乘:");
        System.out.print("send: ");
        int a = cin.nextInt();
        int b = cin.nextInt();
        int cAns = c.getProxy().mul(a, b);
        System.out.println("receive: " + cAns);
        System.out.println();
        //排序
        System.out.println("排序:");
        // Grow the buffer on demand instead of a fixed int[1000], which would
        // overflow (ArrayIndexOutOfBoundsException) on more than 1000 inputs.
        int[] nums = new int[16];
        System.out.print("send(end with #): ");
        int len = 0;
        while (cin.hasNext()) {
            String tmp = cin.next();
            if (tmp.equals("#")) break;
            if (len == nums.length) {
                nums = Arrays.copyOf(nums, nums.length * 2);
            }
            nums[len++] = Integer.parseInt(tmp);
        }
        int[] numsAns = c.getProxy().sortGreat(nums, len);
        System.out.print("receive: ");
        for (int i = 0; i < len; i++) {
            System.out.print(numsAns[i] + " ");
        }
        System.out.println();
        c.close();
        cin.close();
    }
}
2. jar包
从hadoop-2.7.0.tar.gz中,取出运行所需的jar包,如下:
3. 编译
此处的编译其实使用的就是"javac"指令,但由于需要包含所有的需要的jar包,故还是有些不同。这里jar包的保存目录为:“E:\rpc\extJar”,而java源代码保存的目录为“E:\rpc”,故编译命令写成windows批处理文件为:
compile.bat
javac -cp extJar/commons-collections-3.2.1.jar;extJar/commons-configuration-1.6.jar;extJar/commons-io-2.4.jar;extJar/commons-lang-2.6.jar;extJar/commons-logging-1.1.3.jar;extJar/guava-11.0.2.jar;extJar/hadoop-auth-2.7.0.jar;extJar/hadoop-common-2.7.0.jar;extJar/htrace-core-3.1.0-incubating.jar;extJar/log4j-1.2.17.jar;extJar/protobuf-java-2.5.0.jar;extJar/slf4j-api-1.7.10.jar;extJar/slf4j-log4j12-1.7.10.jar; *.java
4. 运行
同理,服务器和客户端的运行脚本,如下:
server.bat
java -cp extJar/commons-collections-3.2.1.jar;extJar/commons-configuration-1.6.jar;extJar/commons-io-2.4.jar;extJar/commons-lang-2.6.jar;extJar/commons-logging-1.1.3.jar;extJar/guava-11.0.2.jar;extJar/hadoop-auth-2.7.0.jar;extJar/hadoop-common-2.7.0.jar;extJar/htrace-core-3.1.0-incubating.jar;extJar/log4j-1.2.17.jar;extJar/protobuf-java-2.5.0.jar;extJar/slf4j-api-1.7.10.jar;extJar/slf4j-log4j12-1.7.10.jar; MyServer
client.bat
java -cp extJar/commons-collections-3.2.1.jar;extJar/commons-configuration-1.6.jar;extJar/commons-io-2.4.jar;extJar/commons-lang-2.6.jar;extJar/commons-logging-1.1.3.jar;extJar/guava-11.0.2.jar;extJar/hadoop-auth-2.7.0.jar;extJar/hadoop-common-2.7.0.jar;extJar/htrace-core-3.1.0-incubating.jar;extJar/log4j-1.2.17.jar;extJar/protobuf-java-2.5.0.jar;extJar/slf4j-api-1.7.10.jar;extJar/slf4j-log4j12-1.7.10.jar; MyClient
5. 结果:
(1)文件目录
(2)编译结果
上图编译过程最后给出了一个警告(使用了已过时的 API),目前尚未找到消除它的方法,但它不影响编译产物和运行结果。
(3)运行结果
服务器:
客户端: