BlockLocation[]blkLocations=hdfs.getFileBlockLocations(fileStatus,0,fileStatus.getLen()); //这个地方,作者写错了,需要把path改为fileStatus intblkCount=blkLocations.length; for(inti=0;i<blkCount;i++){ String[]hosts=blkLocations[i].getHosts(); // Do something with the block hosts }
8. Get a list of all the node host names in the HDFS cluster
This method casts the FileSystem object to a DistributedFileSystem object.
This method will work only when Hadoop is configured as a cluster.
Running Hadoop on the local machine only, in a non-cluster configuration, will cause the cast to DistributedFileSystem to fail.
try { // Get a list of all the datanode host names in the HDFS cluster.
    // Obtain the configured FileSystem and cast it to DistributedFileSystem;
    // the cast only succeeds when running against an actual HDFS cluster.
    FileSystem fs = FileSystem.get(conf);
    DistributedFileSystem hdfs = (DistributedFileSystem) fs;
    DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();
    String[] names = new String[dataNodeStats.length];
    System.out.println("list of all the nodes in HDFS cluster:"); // print info
    // Collect and print each datanode's host name.
    for (int i = 0; i < dataNodeStats.length; i++) {
        names[i] = dataNodeStats[i].getHostName();
        System.out.println(names[i]); // print info
// Create the file on HDFS (overwriting if it exists) and write test data.
System.out.println("create and write [" + f.getName() + "] to hdfs:");
FSDataOutputStream os = fs.create(f, true, 0);
// Write the sample string ten times, then a trailing newline.
for (int n = 0; n < 10; n++) {
    os.writeChars("test hdfs ");
}
os.writeChars("\n");
os.close();
// Get the locations of a file in HDFS and print the datanode host names
// that hold each of its blocks.
System.out.println("locations of file in HDFS:");
FileStatus filestatus = fs.getFileStatus(f);
BlockLocation[] blkLocations = fs.getFileBlockLocations(filestatus, 0, filestatus.getLen());
int blkCount = blkLocations.length;
for (int i = 0; i < blkCount; i++) {
    String[] hosts = blkLocations[i].getHosts();
    // Fix: printing the array itself only shows its object reference
    // (e.g. "[Ljava.lang.String;@1b6d..."); print each host name instead.
    for (int j = 0; j < hosts.length; j++) {
        System.out.println(hosts[j]);
    }
}
//get HDFS file last modification time
longmodificationTime=filestatus.getModificationTime();// measured in milliseconds since the epoch
Dated=newDate(modificationTime); System.out.println(d); //reading from HDFS
// Read the file content back from HDFS.
// Fix: the file was written with writeChars() (raw 2-byte chars), but the
// original code read it with readUTF(), which expects a length-prefixed
// modified-UTF-8 record and would fail on this data. Read it back char by
// char with readChar() to match the writer.
System.out.println("read [" + f.getName() + "] from hdfs:");
FSDataInputStream dis = fs.open(f);
StringBuilder content = new StringBuilder();
while (dis.available() > 0) {
    content.append(dis.readChar());
}
System.out.println(content.toString());
dis.close();