I am getting this exception when trying to run my first program on Hadoop (I am using the new Hadoop API on version 0.20.2). I searched online, and it seems most people run into this when they have not set the MapperClass and ReducerClass in the configuration logic. But I checked, and the code looks fine to me. I would appreciate it if someone could help.
java.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.Text, received org.apache.hadoop.io.LongWritable
    at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:871)
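For context, the fix suggested in those threads is simply registering the classes on the Job, which my driver below already does. A minimal sketch of that step (assuming job is an org.apache.hadoop.mapreduce.Job):

// Without these calls the default identity Mapper runs, passing the
// LongWritable file offset straight through as the key, which produces
// exactly this mismatch.
job.setMapperClass(WordCountMapper.class);
job.setReducerClass(WordCountReducer.class);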
WordCountMapper.java:

package com.test.wc;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    public void Map(LongWritable key, Text value, Context ctx) throws IOException, InterruptedException {
        String line = value.toString();
        for (String word : line.split("\\W+")) {
            if (word.length() > 0) {
                ctx.write(new Text(word), new IntWritable(1));
            }
        }
    }
}
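To rule out the tokenization itself, the same split can be exercised in plain Java SE. A standalone sketch (the class name and sample line are made up for illustration):

public class SplitCheck {
    public static void main(String[] args) {
        String line = "Hello, hadoop world!";
        // Same regex as the mapper: split on runs of non-word characters.
        for (String word : line.split("\\W+")) {
            if (word.length() > 0) {
                System.out.println(word); // prints Hello, hadoop, world on separate lines
            }
        }
    }
}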
WordCountReducer.java:

package com.test.wc;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

    public void reduce(Text key, Iterable<IntWritable> values, Context ctx) throws IOException, InterruptedException {
        int wordCount = 0;
        for (IntWritable value : values) {
            wordCount += value.get();
        }
        ctx.write(key, new IntWritable(wordCount));
    }
}
WordCountJob.java:

package com.test.wc;

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCountJob {

    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        if (args.length != 2) {
            System.out.println("invalid usage");
            System.exit(-1);
        }

        Job job = new Job();
        job.setJarByClass(WordCountJob.class);
        job.setJobName("WordCountJob");

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReducer.class);
        //job.setCombinerClass(WordCountReducer.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
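For reference, the same wiring is often expressed through Configuration and ToolRunner so that generic command-line options are parsed. A sketch of that variant (an assumption on my part, not code from the failing setup):

package com.test.wc;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class WordCountTool extends Configured implements Tool {

    @Override
    public int run(String[] args) throws Exception {
        // Same job setup as WordCountJob, but the Job picks up the
        // Configuration managed by ToolRunner (so -D options apply).
        Job job = new Job(getConf(), "WordCountJob");
        job.setJarByClass(WordCountTool.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new WordCountTool(), args));
    }
}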