
I am having a problem when I run my Hadoop jobs via Oozie. The job runs normally when I launch it directly with the following driver code:

    public int run(String[] args) throws Exception {
        Job job = new Job(getConf(), "GCAnalyzer");

        job.setJarByClass(GCMapper.class);

        job.setMapperClass(getMapperClass());
        job.setReducerClass(getReducerClass());

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);

        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        job.setNumReduceTasks(1);

        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String args[]) throws Exception {
        int res = ToolRunner.run(new Configuration(), new GCMapper(), args);
        System.exit(res);
    }

When I run the same job with the following Oozie config, I get an error:

<workflow-app xmlns="uri:oozie:workflow:0.2" name="log_analysis">
    <start to="gclogs"/>

    <action name="gclogs">
        <map-reduce>
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <prepare>
                <delete path="${gcoutput}"/>
            </prepare>

            <configuration>
                <property>
                    <name>mapred.mapper.new-api</name>
                    <value>true</value>
                </property>

                <property>
                    <name>mapred.reducer.new-api</name>
                    <value>true</value>
                </property>

                <property>
                    <name>mapred.map.class</name>
                    <value>${gcmapper}</value>
                </property>

                <property>
                    <name>mapreduce.reduce.class</name>
                    <value>${gcreducer}</value>
                </property>

                <property>
                    <name>mapred.mapoutput.key.class</name>
                    <value>org.apache.hadoop.io.Text</value>
                </property>

                <property>
                    <name>mapred.mapoutput.value.class</name>
                    <value>org.apache.hadoop.io.NullWritable</value>
                </property>

                <property>
                    <name>mapreduce.inputformat.class</name>
                    <value>org.apache.hadoop.mapreduce.lib.input.TextInputFormat</value>
                </property>

                <property>
                    <name>mapreduce.outputformat.class</name>
                    <value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
                </property>

                <property>
                    <name>mapred.input.dir</name>
                    <value>${gcinput}</value>
                </property>

                <property>
                    <name>mapred.output.dir</name>
                    <value>${gcoutput}</value>
                </property>

                <property>
                    <name>mapred.reduce.tasks</name>
                    <value>1</value>
                </property>

                <property>
                    <name>mapred.job.queue.name</name>
                    <value>default</value>
                </property>
            </configuration>
        </map-reduce>

        <ok to="end"/>
        <error to="fail"/>
    </action>

    <kill name="fail">
        <message>Map/Reduce failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <end name="end"/>
</workflow-app>

The job properties file is as follows:

oozie.wf.application.path=${nameNode}/user/svc_hdp/ozzie/apps/log_analyzer
oozie.libpath=${nameNode}/user/svc_hdp/oozie/apps/share-lib

nameNode=hdfs://path
jobTracker=path
queueName=default


# GCMApred
gcinput=${nameNode}/user/a.ntimbadi/inputgc
gcoutput=${nameNode}/user/a.ntimbadi/ouputgc
gcmapper=edu.*******.hadoop.log.GCMapper$CasMap
gcreducer=org.apache.hadoop.mapreduce.Reducer

The error is as follows:

java.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.Text, recieved org.apache.hadoop.io.LongWritable

I am sure my Hadoop code is fine, since it runs when I launch it manually. Something must be wrong with my Oozie config. Please let me know if there is an obvious error.


1 Answer


Oh, I found my error here: mapred.map.class should be mapreduce.map.class.
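
For reference, a minimal sketch of the corrected property, assuming the rest of the <configuration> block stays exactly as in the question:

    <!-- new-API mapper class property; value reuses the ${gcmapper} variable from job.properties -->
    <property>
        <name>mapreduce.map.class</name>
        <value>${gcmapper}</value>
    </property>

With mapred.mapper.new-api set to true, Hadoop resolves the mapper from the mapreduce.* property name, so the mapred.map.class entry is ignored and the job falls back to the default identity Mapper. That mapper passes TextInputFormat's LongWritable offset keys straight through, which is what produces the "expected Text, received LongWritable" type-mismatch error shown above.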

answered 2013-08-06T15:59:01.853