One-click installation of a pseudo-distributed cluster
For the installation itself I recommend a good article on this: with a single command it really does configure and start everything automatically, saving both time and effort.
To connect to Hadoop from Windows, you need to download and install winutils.exe, hadoop.dll, and the related native helpers before the client can connect properly.
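On Windows the Hadoop client locates winutils.exe through the HADOOP_HOME environment variable or the hadoop.home.dir system property, and hadoop.dll is typically placed alongside it or on the JVM's java.library.path. A minimal sketch of setting the property in code, assuming the binaries were unpacked under C:\hadoop\bin (the path is an assumption, adjust it to your own layout):

public class WindowsHadoopSetup {
    public static void main(String[] args) {
        // Point the Hadoop client at the directory whose bin folder contains winutils.exe and hadoop.dll.
        // "C:\\hadoop" is an assumed location; use wherever you actually placed the binaries.
        System.setProperty("hadoop.home.dir", "C:\\hadoop");
    }
}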
Reading and appending file content in HDFS from Spring Boot
Here the cloud server's IP is assumed to be 192.168.110.120.
Remember to edit the Windows hosts file so that the datanode hostname of that machine resolves to 192.168.110.120.
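For example, if the datanode registers itself under the hostname hadoop001 (a hypothetical name; check the hostname shown in the NameNode web UI), the entry in C:\Windows\System32\drivers\etc\hosts would look like this:

192.168.110.120  hadoop001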
HadoopConfig
import java.net.URI;

import org.apache.hadoop.fs.FileSystem;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class HadoopConfig {

    @Bean("hdfsConfig")
    public org.apache.hadoop.conf.Configuration hdfsChannel() {
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        // Single-node pseudo-cluster, so one replica is enough
        conf.set("dfs.replication", "1");
        // Resolve datanodes by hostname so the Windows client can reach them through the hosts file
        conf.set("dfs.client.use.datanode.hostname", "true");
        // Legacy MapReduce JobTracker address (not needed for plain HDFS reads/appends)
        conf.set("mapred.job.tracker", "hdfs://192.168.110.120:8020/");
        conf.set("fs.defaultFS", "hdfs://192.168.110.120:8020/");
        // Act as the root user when talking to HDFS
        System.setProperty("HADOOP_USER_NAME", "root");
        return conf;
    }

    @Bean("fileSystem")
    public FileSystem createFs(@Qualifier("hdfsConfig") org.apache.hadoop.conf.Configuration conf) {
        FileSystem fs = null;
        try {
            URI uri = new URI("hdfs://192.168.110.120:8020/");
            fs = FileSystem.get(uri, conf);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return fs;
    }
}
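The controller below injects a HadoopTemplate that is not shown in this excerpt. As a rough guide only, here is a minimal sketch of what such a helper could look like; the nameSpace value and the readTo/append signatures are assumptions inferred from how the controller calls them, not the original implementation:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class HadoopTemplate {

    @Autowired
    private FileSystem fileSystem;

    // Base HDFS directory under which files are read and appended; assumed value
    private String nameSpace = "/user/root";

    public String getNameSpace() {
        return nameSpace;
    }

    // Read a file under the namespace and print its content, using the given buffer size
    public void readTo(String nameSpace, String fileName, int bufferSize) {
        Path path = new Path(nameSpace + fileName);
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(fileSystem.open(path, bufferSize), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Append content to an existing file (the cluster must allow appends, e.g. dfs.support.append)
    public void append(String nameSpace, String fileName, String content) {
        Path path = new Path(nameSpace + fileName);
        try (FSDataOutputStream out = fileSystem.append(path)) {
            out.write(content.getBytes(StandardCharsets.UTF_8));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}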
HadoopController
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping(value = "/hadoop")
public class HadoopController {

    @Autowired
    private HadoopTemplate template;

    // Read a file from HDFS; "name" is the file name under the template's namespace directory
    @GetMapping(value = "/read")
    public String read(@RequestParam("name") String name) {
        template.readTo(template.getNameSpace(), "/" + name, 4096);
        return "read success";
    }

    @GetMapping(value = "/write")
    public String write(@Re