我试图在 Eclipse 中编译并运行 Hadoop 的 PutMerge 程序,但它没有按预期工作:文件没有创建在 HDFS 上,而是创建在了我的本地机器(即安装 Eclipse 的那台机器)上。
看起来我的 conf 对象没有从 XML 配置文件中读取到正确的配置。
编辑:
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * PutMerge utility: concatenates every file found in a local directory
 * (args[0]) into a single file on HDFS (args[1]).
 *
 * <p>NOTE(review): if output lands on the local filesystem instead of HDFS,
 * then {@code FileSystem.get(conf)} resolved to the local default — meaning
 * {@code fs.default.name} was never picked up. Verify that the two XML paths
 * added below actually exist and are readable from the Eclipse run
 * configuration, or put the Hadoop conf directory on the runtime classpath
 * instead of hard-coding absolute paths.
 */
public class PlayWithHadoop {
    public static void main(String[] args) throws IOException {
        // Fail fast with a usage message instead of an
        // ArrayIndexOutOfBoundsException when arguments are missing.
        if (args.length < 2) {
            System.err.println("Usage: PlayWithHadoop <local input dir> <hdfs output file>");
            return;
        }

        Configuration conf = new Configuration();
        // Resources must be added BEFORE FileSystem.get(conf) so that
        // fs.default.name from core-site.xml takes effect for this instance.
        conf.addResource(new Path("/home/hduser/hadoop/conf/core-site.xml"));
        conf.addResource(new Path("/home/hduser/hadoop/conf/hdfs-site.xml"));

        FileSystem hdfs = FileSystem.get(conf);
        FileSystem local = FileSystem.getLocal(conf);

        Path inputDir = new Path(args[0]);
        Path hdfsFile = new Path(args[1]);

        try {
            FileStatus[] inputFiles = local.listStatus(inputDir);
            // try-with-resources: the original leaked the HDFS output stream
            // (and any open input stream) whenever an IOException was thrown
            // mid-copy, since close() was only reached on the success path.
            try (FSDataOutputStream out = hdfs.create(hdfsFile)) {
                for (FileStatus status : inputFiles) {
                    System.out.println(status.getPath().getName());
                    System.out.println();
                    System.out.println(hdfs.getWorkingDirectory().toString());
                    try (FSDataInputStream in = local.open(status.getPath())) {
                        // 8 KiB buffer; the original 256 bytes made the copy
                        // loop needlessly chatty for no benefit.
                        byte[] buffer = new byte[8192];
                        int bytesRead;
                        while ((bytesRead = in.read(buffer)) > 0) {
                            out.write(buffer, 0, bytesRead);
                        }
                    }
                }
            }
        } catch (IOException e) {
            // Best-effort CLI tool: report and exit rather than propagate.
            e.printStackTrace();
        }
    }
}