hadoop之hdfs的java接口调用
package hadoop.hdfs;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
 * Demonstrates basic HDFS operations (delete, mkdir, upload, download)
 * through the Hadoop {@link FileSystem} Java API.
 *
 * NOTE(review): the NameNode address and all paths are hard-coded for a
 * demo cluster; adjust before reuse.
 */
public class HDFSDemo {

    private FileSystem fs = null;

    /** Connects to the demo NameNode as user "root" before each test. */
    @Before
    public void init() throws IOException, URISyntaxException, InterruptedException {
        fs = FileSystem.get(new URI("hdfs://192.168.23.127:9000"), new Configuration(), "root");
    }

    /** Closes the FileSystem after each test; the original leaked this handle. */
    @After
    public void cleanup() throws IOException {
        if (fs != null) {
            fs.close();
        }
    }

    /** Recursively deletes /words.txt and prints whether the delete succeeded. */
    @Test
    public void testDel() throws IllegalArgumentException, IOException {
        boolean flag = fs.delete(new Path("/words.txt"), true);
        System.out.println(flag);
    }

    /** Creates /itcast88888888 (including parents) and prints the result. */
    @Test
    public void testMkdir() throws IllegalArgumentException, IOException {
        boolean flag = fs.mkdirs(new Path("/itcast88888888"));
        System.out.println(flag);
    }

    /** Uploads local c:/w.txt to HDFS /words.txt. */
    @Test
    public void testUpload() throws IllegalArgumentException, IOException {
        FSDataOutputStream out = fs.create(new Path("/words.txt"));
        FileInputStream in = new FileInputStream(new File("c:/w.txt"));
        // Final 'true' tells IOUtils to close both streams when done (even on error).
        IOUtils.copyBytes(in, out, 2048, true);
    }

    /** Downloads /jdk.avi from HDFS to a local file, closing the FileSystem afterwards. */
    public static void main(String[] args) throws IOException, URISyntaxException {
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.23.127:9000"), new Configuration());
        try {
            InputStream in = fs.open(new Path("/jdk.avi"));
            FileOutputStream out = new FileOutputStream(new File("c:/jdk123456"));
            // copyBytes closes both streams; fs itself is closed in the finally block.
            IOUtils.copyBytes(in, out, 2048, true);
        } finally {
            fs.close(); // original leaked the FileSystem handle
        }
    }
}
hadoop的rpc调用
Bizable.java
package hadoop.rpc;
/**
 * RPC protocol contract shared by {@code RPCServer} and {@code RPCClient}.
 */
public interface Bizable {
/**
 * Protocol version. Hadoop RPC compares the client's requested version against
 * this value on the server side, so both ends must use the same number.
 */
public static final long versionID = 100000;
/** Returns a greeting for {@code name}; implemented server-side. */
public String sayHi(String name);
}
RPCServer.java
package hadoop.rpc;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RPC.Builder;
import org.apache.hadoop.ipc.RPC.Server;
public class RPCServer implements Bizable {
public String sayHi(String name){
return "Hi~ "+name;
}
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
//192.168.199.211为本机IP
Server server = new RPC.Builder(conf).setProtocol(Bizable.class).setInstance(new RPCServer()).setBindAddress("192.168.199.211").setPort(9000).build();
server.start();
}
}
RPCClient.java
package hadoop.rpc;
import java.io.IOException;
import java.net.InetSocketAddress;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;
/**
 * Hadoop RPC client for the {@code Bizable} protocol.
 *
 * NOTE(review): the class implements {@code Bizable} only as a demo artifact;
 * the local stub below is never invoked — calls go through the RPC proxy.
 */
public class RPCClient implements Bizable {

    /** Unused local stub; the real implementation lives on the server. */
    @Override
    public String sayHi(String name) {
        return null;
    }

    /** Obtains a proxy, performs one remote call, prints the result, and releases the proxy. */
    public static void main(String[] args) throws IOException {
        // BUG FIX: the original passed the literal 10010 as the client version, which does
        // not match Bizable.versionID (100000); Hadoop RPC validates the protocol version,
        // so the declared constant must be used.
        Bizable proxy = RPC.getProxy(Bizable.class, Bizable.versionID,
                new InetSocketAddress("192.168.199.211", 9000), new Configuration());
        String result = proxy.sayHi("tomcat");
        System.out.println(result);
        RPC.stopProxy(proxy); // release the proxy's connection resources
    }
}
其实hadoop的rpc调用就是对socket的封装，虽然粗犷但是简单直接。