import java.awt.image.BufferedImage;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import javax.imageio.ImageIO;

// LIRE feature extractor (the package path may differ across LIRE versions)
import net.semanticmetadata.lire.imageanalysis.AutoColorCorrelogram;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class image_mapreduce extends Configured implements Tool {

  private static String[] testFiles = new String[] {
      "img01.JPG", "img02.JPG", "img03.JPG", "img04.JPG",
      "img06.JPG", "img07.JPG", "img05.JPG" };
  // private static String testFilespath = "/home/student/Desktop/images";
  private static String testFilespath = "hdfs://localhost:54310/user/root/images";
  // private static String indexpath = "/home/student/Desktop/indexDemo";
  private static String testExtensive = "/home/student/Desktop/images";

  public static class MapClass extends MapReduceBase
      implements Mapper<Text, Text, Text, Text> {

    private Text input_image = new Text();
    private Text input_vector = new Text();

    @Override
    public void map(Text key, Text value, OutputCollector<Text, Text> output,
                    Reporter reporter) throws IOException {
      System.out.println("CorrelogramIndex Method:");
      String featureString;
      int MAXIMUM_DISTANCE = 16;
      AutoColorCorrelogram.Mode mode = AutoColorCorrelogram.Mode.FullNeighbourhood;
      for (String identifier : testFiles) {
        // NOTE: FileInputStream reads from the local file system only; it cannot
        // open an hdfs:// URL, which is exactly the problem described below.
        try (FileInputStream fis = new FileInputStream(testFilespath + "/" + identifier)) {
          // Document doc = builder.createDocument(fis, identifier);
          BufferedImage bimg = ImageIO.read(fis);
          AutoColorCorrelogram vd = new AutoColorCorrelogram(MAXIMUM_DISTANCE, mode);
          vd.extract(bimg);
          featureString = vd.getStringRepresentation();
          double[] bytearray = vd.getDoubleHistogram();
          System.out.println("image: " + identifier + " " + featureString);
        }
        System.out.println(" ------------- ");
        input_image.set(identifier);
        input_vector.set(featureString);
        output.collect(input_image, input_vector);
      }
    }
  }

  public static class Reduce extends MapReduceBase
      implements Reducer<Text, Text, Text, Text> {

    @Override
    public void reduce(Text key, Iterator<Text> values,
                       OutputCollector<Text, Text> output,
                       Reporter reporter) throws IOException {
      // String.concat() returns a new String and leaves the receiver unchanged,
      // so the original out_vector.concat(...) silently discarded every value.
      // A StringBuilder actually accumulates the vectors.
      StringBuilder out_vector = new StringBuilder();
      while (values.hasNext()) {
        out_vector.append(values.next().toString());
      }
      output.collect(key, new Text(out_vector.toString()));
    }
  }

  static int printUsage() {
    System.out.println("image_mapreduce [-m <maps>] [-r <reduces>] <input> <output>");
    ToolRunner.printGenericCommandUsage(System.out);
    return -1;
  }


  @Override
  public int run(String[] args) throws Exception {
    JobConf conf = new JobConf(getConf(), image_mapreduce.class);
    conf.setJobName("image_mapreduce");

    // the keys are image identifiers (strings)
    conf.setOutputKeyClass(Text.class);
    // the values are feature vectors (strings)
    conf.setOutputValueClass(Text.class);

    conf.setMapperClass(MapClass.class);
    // conf.setCombinerClass(Reduce.class);
    conf.setReducerClass(Reduce.class);

    List<String> other_args = new ArrayList<String>();
    for (int i = 0; i < args.length; ++i) {
      try {
        if ("-m".equals(args[i])) {
          conf.setNumMapTasks(Integer.parseInt(args[++i]));
        } else if ("-r".equals(args[i])) {
          conf.setNumReduceTasks(Integer.parseInt(args[++i]));
        } else {
          other_args.add(args[i]);
        }
      } catch (NumberFormatException except) {
        System.out.println("ERROR: Integer expected instead of " + args[i]);
        return printUsage();
      } catch (ArrayIndexOutOfBoundsException except) {
        System.out.println("ERROR: Required parameter missing from " + args[i - 1]);
        return printUsage();
      }
    }

    FileInputFormat.setInputPaths(conf, other_args.get(0));
    // FileInputFormat.setInputPaths(conf, new Path("hdfs://localhost:54310/user/root/images"));
    FileOutputFormat.setOutputPath(conf, new Path(other_args.get(1)));

    JobClient.runJob(conf);
    return 0;
  }


  public static void main(String[] args) throws Exception {
    int res = ToolRunner.run(new Configuration(), new image_mapreduce(), args);
    System.exit(res);
  }
}

I am writing a program that takes multiple image files stored in HDFS as input and extracts features in the map function. How do I specify the path from which FileInputStream(some argument) should read the images? Or is there some way to read multiple image files?

What I want to do is:
- take multiple image files stored in HDFS as input
- extract features in the map function
- reduce iteratively

Please help me with the code, or suggest a better approach.


2 Answers


Look into using the HIPI library - it stores a collection of images in an ImageBundle (which is more efficient than storing the individual image files in HDFS). It comes with several examples, too.

As for your code, you need to specify the input and output formats you plan to use. There is no built-in input format that hands over an entire file, but you can extend FileInputFormat and create a RecordReader that emits <Text, BytesWritable> pairs, where the key is the file name and the value is the bytes of the image file.

In fact, Hadoop - The Definitive Guide has an example of exactly this input format:
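The book's version of this idea is a WholeFileInputFormat whose RecordReader delivers a file's full contents as a single record. Below is a minimal sketch along those lines, written against the old mapred API to match the question's code; the class names are illustrative, and unlike the book's version (which uses NullWritable keys) the key here is the file name, as suggested above:

    import java.io.IOException;

    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.BytesWritable;
    import org.apache.hadoop.io.IOUtils;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.*;

    // One record per file: key = file name, value = raw file bytes.
    public class WholeFileInputFormat extends FileInputFormat<Text, BytesWritable> {

      @Override
      protected boolean isSplitable(FileSystem fs, Path filename) {
        return false; // never split an image across mappers
      }

      @Override
      public RecordReader<Text, BytesWritable> getRecordReader(
          InputSplit split, JobConf job, Reporter reporter) throws IOException {
        return new WholeFileRecordReader((FileSplit) split, job);
      }
    }

    class WholeFileRecordReader implements RecordReader<Text, BytesWritable> {

      private final FileSplit split;
      private final JobConf conf;
      private boolean processed = false;

      WholeFileRecordReader(FileSplit split, JobConf conf) {
        this.split = split;
        this.conf = conf;
      }

      @Override
      public boolean next(Text key, BytesWritable value) throws IOException {
        if (processed) {
          return false;
        }
        byte[] contents = new byte[(int) split.getLength()];
        Path file = split.getPath();
        FileSystem fs = file.getFileSystem(conf);
        FSDataInputStream in = null;
        try {
          in = fs.open(file);
          IOUtils.readFully(in, contents, 0, contents.length);
          key.set(file.getName());                  // key: the image's file name
          value.set(contents, 0, contents.length);  // value: the image's bytes
        } finally {
          IOUtils.closeStream(in);
        }
        processed = true;
        return true;
      }

      @Override public Text createKey() { return new Text(); }
      @Override public BytesWritable createValue() { return new BytesWritable(); }
      @Override public long getPos() { return processed ? split.getLength() : 0; }
      @Override public float getProgress() { return processed ? 1.0f : 0.0f; }
      @Override public void close() throws IOException { }
    }

The job would then register it with conf.setInputFormat(WholeFileInputFormat.class), and the mapper's input types become <Text, BytesWritable>.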

answered 2012-05-30T10:56:16.507

If you want to send all the images as input to the MR job, just point the job's input path (FileInputFormat.setInputPaths()) at the input directory. If you only want selected images from a particular folder, you can list them when setting the input paths.

One way is to create a Path[] with one entry per image. Alternatively, just set it as a comma-separated string of all the paths. Browse the following documentation:

http://hadoop.apache.org/docs/current/api/org/apache/hadoop/mapred/FileInputFormat.html
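For instance, both forms below are equivalent ways of handing the job a specific set of images (a sketch reusing the HDFS URL and file names from the question):

    // Variant 1: one Path per image.
    FileInputFormat.setInputPaths(conf,
        new Path("hdfs://localhost:54310/user/root/images/img01.JPG"),
        new Path("hdfs://localhost:54310/user/root/images/img02.JPG"));

    // Variant 2: a single comma-separated string of paths.
    FileInputFormat.setInputPaths(conf,
        "hdfs://localhost:54310/user/root/images/img01.JPG,"
        + "hdfs://localhost:54310/user/root/images/img02.JPG");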

One more thing: you have to set the map input format to <Text, BytesWritable> and obtain the image features from that BytesWritable input, instead of creating a new FileInputStream.
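Putting both answers together, the map function would then decode each image from the bytes it receives rather than opening a FileInputStream itself. A rough sketch, assuming the <Text, BytesWritable> whole-file input format described in the first answer (imports as in the question's code, plus java.io.ByteArrayInputStream):

    public static class MapClass extends MapReduceBase
        implements Mapper<Text, BytesWritable, Text, Text> {

      @Override
      public void map(Text key, BytesWritable value,
                      OutputCollector<Text, Text> output, Reporter reporter)
          throws IOException {
        // The record already carries the image bytes; decode them directly.
        BufferedImage bimg = ImageIO.read(
            new ByteArrayInputStream(value.getBytes(), 0, value.getLength()));
        AutoColorCorrelogram vd =
            new AutoColorCorrelogram(16, AutoColorCorrelogram.Mode.FullNeighbourhood);
        vd.extract(bimg);
        // key is the file name emitted by the record reader.
        output.collect(key, new Text(vd.getStringRepresentation()));
      }
    }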

answered 2013-07-04T10:07:28.073