
// What packages do these 3 programs need? How do I combine the 3 programs into a single program? And how do I use them to run a MapReduce job in Eclipse?

Please help me get this program to run successfully.

Operating system: Linux

Errors I am getting:

  1. The method TryParseInt(String) is undefined for the type MaxPubYear.MaxPubYearReducer

  2. The method setInputFormatClass(Class<? extends InputFormat>) in the type Job is not applicable for the arguments (Class<TextInputFormat>)

Mapper code:

public static class MaxPubYearMapper extends Mapper<LongWritable, Text, IntWritable, Text>
{
    public void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException
    {
        String delim = "\t";
        Text valtosend = new Text();
        String tokens[] = value.toString().split(delim);
        if (tokens.length == 2)
        {
            valtosend.set(tokens[0] + ";" + tokens[1]);
            context.write(new IntWritable(1), valtosend);
        }
    }
}

Reducer code:

public static class MaxPubYearReducer extends Reducer<IntWritable, Text, Text, IntWritable>
{
    public void reduce(IntWritable key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException
    {
        int maxiValue = Integer.MIN_VALUE;
        String maxiYear = "";
        for (Text value : values)
        {
            String token[] = value.toString().split(";");
            if (token.length == 2 && TryParseInt(token[1]).intValue() > maxiValue)
            {
                maxiValue = TryParseInt(token[1]);
                maxiYear = token[0];
            }
        }
        context.write(new Text(maxiYear), new IntWritable(maxiValue));
    }
}

Driver code:

public static void main(String[] args) throws Exception
{
    Configuration conf = new Configuration();
    Job job = new Job(conf, "Frequency");
    job.setJarByClass(MaxPubYear.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    job.setMapperClass(FrequencyMapper.class);
    job.setCombinerClass(FrequencyReducer.class);
    job.setReducerClass(FrequencyReducer.class);

    job.setOutputFormatClass(TextOutputFormat.class);
    job.setInputFormatClass(TextInputFormat.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1] + "_temp"));
    int exitCode = job.waitForCompletion(true) ? 0 : 1;

    if (exitCode == 0)
    {
        Job SecondJob = new Job(conf, "Maximum Publication year");
        SecondJob.setJarByClass(MaxPubYear.class);

        SecondJob.setOutputKeyClass(Text.class);
        SecondJob.setOutputValueClass(IntWritable.class);

        SecondJob.setMapOutputKeyClass(IntWritable.class);
        SecondJob.setMapOutputValueClass(Text.class);

        SecondJob.setMapperClass(MaxPubYearMapper.class);
        SecondJob.setReducerClass(MaxPubYearReducer.class);

        FileInputFormat.addInputPath(SecondJob, new Path(args[1] + "_temp"));
        FileOutputFormat.setOutputPath(SecondJob, new Path(args[1]));
        System.exit(SecondJob.waitForCompletion(true) ? 0 : 1);
    }
}

1 Answer


Write them all together in one class.

The packages you need are:

package org.myorg;

import java.io.IOException;
import java.util.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;
import org.apache.hadoop.util.*;
import java.io.DataInput;
import java.io.DataOutput;

There may be a few extra things in here, because I copied them from my own code.
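
One note on the second error from the question: make sure TextInputFormat, TextOutputFormat, FileInputFormat and FileOutputFormat come from org.apache.hadoop.mapreduce.lib.input / org.apache.hadoop.mapreduce.lib.output, i.e. the same new API that Job, Mapper and Reducer in this code belong to. Importing them from the old org.apache.hadoop.mapred package is exactly what makes setInputFormatClass(Class<? extends InputFormat>) reject the argument.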

package org.myorg;

import java.util.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;
import org.apache.hadoop.util.*;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

public class MaxPubYear { // the class name must match the file name (MaxPubYear.java), since the driver refers to MaxPubYear.class
    public static class MaxPubYearMapper extends Mapper<LongWritable, Text, IntWritable, Text>
    {
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException
        {
            String delim = "\t";
            Text valtosend = new Text();
            String tokens[] = value.toString().split(delim);
            if (tokens.length == 2)
            {
                valtosend.set(tokens[0] + ";" + tokens[1]);
                context.write(new IntWritable(1), valtosend);
            }
        }
    }

    public static class MaxPubYearReducer extends Reducer<IntWritable, Text, Text, IntWritable>
    {
        public void reduce(IntWritable key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException
        {
            int maxiValue = Integer.MIN_VALUE;
            String maxiYear = "";
            for (Text value : values)
            {
                String token[] = value.toString().split(";");
                if (token.length == 2 && TryParseInt(token[1]).intValue() > maxiValue)
                {
                    maxiValue = TryParseInt(token[1]);
                    maxiYear = token[0];
                }
            }
            context.write(new Text(maxiYear), new IntWritable(maxiValue));
        }
    }
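
    // The reducer above calls TryParseInt(...), but the method is never defined anywhere,
    // which is exactly the "method TryParseInt(String) is undefined" compile error.
    // A minimal sketch of such a helper, assuming it is only meant to wrap Integer.parseInt
    // for the count strings, could look like this:
    public static Integer TryParseInt(String value)
    {
        try
        {
            return Integer.parseInt(value.trim());
        }
        catch (NumberFormatException e)
        {
            // Fall back to a very small value so malformed records never win the max comparison.
            return Integer.MIN_VALUE;
        }
    }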
    public static void main(String[] args) throws Exception
    {
        Configuration conf = new Configuration();

        // First job: the frequency count. FrequencyMapper and FrequencyReducer are the
        // classes from the first part of the program; they are not shown in the question
        // and must also be placed inside this class (or be on the classpath) to compile.
        Job job = new Job(conf, "Frequency");
        job.setJarByClass(MaxPubYear.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        job.setMapperClass(FrequencyMapper.class);
        job.setCombinerClass(FrequencyReducer.class);
        job.setReducerClass(FrequencyReducer.class);

        job.setOutputFormatClass(TextOutputFormat.class);
        job.setInputFormatClass(TextInputFormat.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1] + "_temp"));
        int exitCode = job.waitForCompletion(true) ? 0 : 1;

        if (exitCode == 0)
        {
            // Second job: scan the first job's "key \t count" output and keep the key
            // (the publication year) with the highest count.
            Job SecondJob = new Job(conf, "Maximum Publication year");
            SecondJob.setJarByClass(MaxPubYear.class);

            SecondJob.setOutputKeyClass(Text.class);
            SecondJob.setOutputValueClass(IntWritable.class);

            SecondJob.setMapOutputKeyClass(IntWritable.class);
            SecondJob.setMapOutputValueClass(Text.class);

            SecondJob.setMapperClass(MaxPubYearMapper.class);
            SecondJob.setReducerClass(MaxPubYearReducer.class);

            FileInputFormat.addInputPath(SecondJob, new Path(args[1] + "_temp"));
            FileOutputFormat.setOutputPath(SecondJob, new Path(args[1]));
            System.exit(SecondJob.waitForCompletion(true) ? 0 : 1);
        }
    }
}
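
Once it compiles, one common way to run it is to export the project from Eclipse as a JAR file and launch it on the cluster with, for example, `hadoop jar MaxPubYear.jar org.myorg.MaxPubYear <input path> <output path>` (the jar name here is just a placeholder). The program takes two arguments: the input directory for the frequency job and the final output directory; the intermediate result is written to `<output path>_temp`.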
answered 2014-09-01T07:30:30.500