
I can run the job with the hadoop jar command, but when I try to schedule it through Oozie it fails. Also, please let me know whether the error is caused by the data in the HBase table or by the XML file.

The workflow XML file is as follows:

<workflow-app xmlns="uri:oozie:workflow:0.1" name="java-main-wf">
    <start to="java-node"/>
    <action name="java-node">
        <java>
            <job-tracker>00.00.00.116:00000</job-tracker>
            <name-node>hdfs://00.00.000.116:00000</name-node>
            <configuration>
                <property>
                    <name>mapred.job.queue.name</name>
                    <value>${queueName}</value>
                </property>
                <property>
                    <name>hbase.zookeeper.property.clientPort</name>
                    <value>2181</value>
                </property>
                <property>
                    <name>hbase.zookeeper.quorum</name>
                    <value>aaaaaa0000002d:2888:3888,bbbbbb000000d:2888:3888,bbbbbb000000d:2888:3888</value>
                </property>
                <property>
                    <name>hbase.master</name>
                    <value>aaaaaa000000d:60000</value>
                </property>
                <property>
                    <name>hbase.rootdir</name>
                    <value>hdfs://aaaa000000d:54310/hbase</value>
                </property>
            </configuration>
            <main-class>com.cf.mapreduce.nord.GetSuggestedItemsForViewsCarts</main-class>
        </java>

        <map-reduce>
            <job-tracker>1000.0000.00.000</job-tracker>
            <name-node>hdfs://10.00.000.000:00000</name-node>
            <configuration>
                <property>
                    <name>mapred.mapper.new-api</name>
                    <value>true</value>
                </property>
                <property>
                    <name>mapred.reducer.new-api</name>
                    <value>true</value>
                </property>
                <property>
                    <name>mapred.job.queue.name</name>
                    <value>${queueName}</value>
                </property>
                <property>
                    <name>mapreduce.map.class</name>
                    <value>mahout.cf.mapreduce.nord.GetSuggestedItemsForViewsCarts$GetSuggestedItemsForViewsCartsMapper</value>
                </property>
                <property>
                    <name>mapreduce.reduce.class</name>
                    <value>mahout.cf.mapreduce.nord.GetSuggestedItemsForViewsCarts$GetSuggestedItemsForViewsCartsReducer</value>
                </property>
                <property>
                    <name>hbase.mapreduce.inputtable</name>
                    <value>${MAPPER_INPUT_TABLE}</value>
                </property>
                <property>
                    <name>hbase.mapreduce.scan</name>
                    <value>${wf:actionData('get-scanner')['scan']}</value>
                </property>
                <property>
                    <name>mapreduce.inputformat.class</name>
                    <value>org.apache.hadoop.hbase.mapreduce.TableInputFormat</value>
                </property>
                <property>
                    <name>mapreduce.outputformat.class</name>
                    <value>org.apache.hadoop.mapreduce.lib.output.NullOutputFormat</value>
                </property>
                <property>
                    <name>mapred.map.tasks</name>
                    <value>1</value>
                </property>
                <property>
                    <name>mapred.reduce.tasks</name>
                    <value>10</value>
                </property>
                <property>
                    <name>hbase.zookeeper.property.clientPort</name>
                    <value>2181</value>
                </property>
                <property>
                    <name>hbase.zookeeper.quorum</name>
                    <value>aaa000,aaaa0000,aaaa00000</value>
                </property>
                <property>
                    <name>hbase.master</name>
                    <value>blrkec242032d:60000</value>
                </property>
                <property>
                    <name>hbase.rootdir</name>
                    <value>hdfs://aaaa0000:00010/hbase</value>
                </property>
            </configuration>
        </map-reduce>
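For reference, the `${wf:actionData('get-scanner')['scan']}` expression only resolves if a previous action named `get-scanner` declares `<capture-output/>` and writes a `scan` key to the Oozie output-properties file. Below is a minimal sketch of what such a main class could look like; the `GetScanner` class name is hypothetical, and it assumes an HBase 0.94-era API where `Scan` is a Hadoop `Writable`.

// Hypothetical main class for a 'get-scanner' <java> action with <capture-output/>.
// Assumes an HBase 0.94-era API where Scan implements Writable.
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.Properties;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Base64;

public class GetScanner {
    public static void main(String[] args) throws Exception {
        Scan scan = new Scan();   // configure start/stop rows, families, etc. as needed

        // Serialize the Scan the same way TableMapReduceUtil does:
        // Base64 of its Writable form.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        scan.write(new DataOutputStream(buffer));
        String serializedScan = Base64.encodeBytes(buffer.toByteArray());

        // Oozie reads this properties file when <capture-output/> is set on the action,
        // making the value available as wf:actionData('get-scanner')['scan'].
        Properties props = new Properties();
        props.setProperty("scan", serializedScan);
        OutputStream os = new FileOutputStream(
                new File(System.getProperty("oozie.action.output.properties")));
        props.store(os, "");
        os.close();
    }
}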

The error log from the mapper is:

    Submitting Oozie action Map-Reduce job   
     <<< Invocation of Main class completed <<<        
    Failing Oozie Launcher, Main class [org.apache.oozie.action.hadoop.MapReduceMain], main() threw exception, No table was provided.
    java.io.IOException: No table was provided.
        at org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.getSplits(TableInputFormatBase.java:130)
        at org.apache.hadoop.mapred.JobClient.writeNewSplits(JobClient.java:962)
        at org.apache.hadoop.mapred.JobClient.writeSplits(JobClient.java:979)
        at org.apache.hadoop.mapred.JobClient.access$500(JobClient.java:170)
        at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:891)
        at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:844)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:396)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
        at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:844)
        at org.apache.hadoop.mapred.JobClient.submitJob(JobClient.java:818)
        at org.apache.oozie.action.hadoop.MapReduceMain.submitJob(MapReduceMain.java:91)
        at org.apache.oozie.action.hadoop.MapReduceMain.run(MapReduceMain.java:57)
        at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:37)
        at org.apache.oozie.action.hadoop.MapReduceMain.main(MapReduceMain.java:40)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:454)
        at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:50)
        at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:393)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:327)
        at org.apache.hadoop.mapred.Child$4.run(Child.java:270)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:396)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
        at org.apache.hadoop.mapred.Child.main(Child.java:264)

    Oozie Launcher failed, finishing Hadoop job gracefully          
    Oozie Launcher ends    
    syslog logs    
    2012-12-11 10:21:18,472 WARN org.apache.hadoop.mapred.JobClient: No job jar file set.  User classes may not be found. See JobConf(Class) or JobConf#setJar(String).
    2012-12-11 10:21:18,586 ERROR org.apache.hadoop.hbase.mapreduce.TableInputFormat: java.lang.NullPointerException
        at org.apache.hadoop.hbase.util.Bytes.toBytes(Bytes.java:404)
        at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:153)
        at org.apache.hadoop.hbase.mapreduce.TableInputFormat.setConf(TableInputFormat.java:91)
        at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:70)
        at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:130)
        at org.apache.hadoop.mapred.JobClient.writeNewSplits(JobClient.java:959)
        at org.apache.hadoop.mapred.JobClient.writeSplits(JobClient.java:979)
        at org.apache.hadoop.mapred.JobClient.access$500(JobClient.java:170)
        at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:891)
        at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:844)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:396)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
        at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:844)
        at org.apache.hadoop.mapred.JobClient.submitJob(JobClient.java:818)
        at org.apache.oozie.action.hadoop.MapReduceMain.submitJob(MapReduceMain.java:91)
        at org.apache.oozie.action.hadoop.MapReduceMain.run(MapReduceMain.java:57)
        at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:37)
        at org.apache.oozie.action.hadoop.MapReduceMain.main(MapReduceMain.java:40)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:454)
        at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:50)
        at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:393)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:327)
        at org.apache.hadoop.mapred.Child$4.run(Child.java:270)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:396)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
        at org.apache.hadoop.mapred.Child.main(Child.java:264)

1 Answer


When you call TableMapReduceUtil.initTableMapperJob(..), that utility method configures a number of job properties, one of which is the HBase table to scan.
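For context, this is roughly what a driver looks like when it goes through TableMapReduceUtil. It is a minimal sketch only; the table, mapper, and job names are placeholders, not anything taken from your code:

// Minimal driver sketch around TableMapReduceUtil (placeholder table/mapper names).
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;

public class SuggestedItemsDriver {

    // Placeholder mapper for illustration.
    public static class MyTableMapper
            extends TableMapper<ImmutableBytesWritable, Text> {
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        Job job = new Job(conf, "suggested-items");
        job.setJarByClass(SuggestedItemsDriver.class);

        Scan scan = new Scan();
        scan.setCaching(500);
        scan.setCacheBlocks(false);

        // This call sets hbase.mapreduce.inputtable and hbase.mapreduce.scan (among others)
        // on the job configuration -- the properties the Oozie action has to provide by hand.
        TableMapReduceUtil.initTableMapperJob(
                "MY_INPUT_TABLE",                  // placeholder table name
                scan,
                MyTableMapper.class,               // placeholder mapper
                ImmutableBytesWritable.class,      // mapper output key
                Text.class,                        // mapper output value
                job);

        job.setOutputFormatClass(NullOutputFormat.class);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}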

Looking at the code (@GrepCode), I can see that this method sets the following properties:

<property>
  <name>hbase.mapreduce.inputtable</name>
  <value>CUSTOMER_INFO</value>
</property>
<property>
  <name>hbase.mapreduce.scan</name>
  <value>...</value>
</property>

The input table should be the name of your table, and the scan property is a serialized (Base64-encoded) version of the scan information. In my view, your best bet is to run the job manually and inspect the job.xml through the job tracker to see what the configured values are.
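If digging through the job tracker UI is awkward, you can also download the job.xml of the working job and print just the two relevant properties locally. A small sketch, where the class name and the file-path argument are placeholders:

// Sketch: load a downloaded job.xml and print the two HBase-related properties
// that TableMapReduceUtil set for the working (hadoop jar) job.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class DumpJobXml {
    public static void main(String[] args) {
        Configuration conf = new Configuration(false);   // skip default resources
        conf.addResource(new Path(args[0]));             // path to the downloaded job.xml
        System.out.println("hbase.mapreduce.inputtable = " + conf.get("hbase.mapreduce.inputtable"));
        System.out.println("hbase.mapreduce.scan       = " + conf.get("hbase.mapreduce.scan"));
    }
}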

Note that you also need to set the properties for the reducer (see the source of the initTableReducerJob method); again, checking the job.xml of a manually submitted job is probably your best option.
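A minimal sketch of that reducer-side call, again with placeholder table and reducer names, just to show where the output-table wiring comes from:

// Sketch of the reducer-side counterpart; initTableReducerJob configures the
// output table and TableOutputFormat wiring on the job.
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

public class ReducerWiring {

    // Placeholder reducer for illustration.
    public static class MyTableReducer
            extends TableReducer<ImmutableBytesWritable, Text, ImmutableBytesWritable> {
    }

    public static void main(String[] args) throws Exception {
        Job job = new Job(HBaseConfiguration.create(), "reducer-wiring");
        TableMapReduceUtil.initTableReducerJob(
                "MY_OUTPUT_TABLE",        // placeholder output table
                MyTableReducer.class,
                job);
    }
}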

answered 2012-12-12T12:21:13.833