1

甚至面临将数据从文本格式表加载到 orc 格式表的相同问题。当我尝试将数据从文本格式表加载到 orc 表时,我收到此错误“线程“主”java.sql.SQLException 中的异常:处理语句时出错:FAILED:执行错误,从 org.apache.hadoop.hive.ql.exec.mr.MapRedTask 返回代码 1”

这是我的代码:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.apache.hive.jdbc.HiveDriver;

public class HiveJdbcClient {

private static String driverName = "org.apache.hive.jdbc.HiveDriver";
public static void main(String[] args) throws SQLException {
        try {
        Class.forName(driverName);
        } catch (ClassNotFoundException e) {
        e.printStackTrace();
        System.exit(1);
        }
        Connection con = DriverManager.getConnection("jdbc:hive2://hdn01.sasken.com:10000/default", "","");
        System.out.println("Connected to Hive");
        Statement stmt = con.createStatement();
        String tempTable = "tempTable";
        String orcTable= "orcTable";
        stmt.execute("CREATE TABLE IF NOT EXISTS "+ tempTable +" (key int, value   string) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE");
        stmt.execute("CREATE TABLE IF NOT EXISTS " + orcTable + "(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS ORC TBLPROPERTIES ('transactional'='true')");
        String filepath = "/home/ee208961/spark-hive/resto.csv";
        stmt.execute("LOAD DATA LOCAL INPATH '" + filepath + "' OVERWRITE INTO TABLE " + tempTable);
        stmt.execute("INSERT OVERWRITE TABLE "+ orcTable +" SELECT * FROM "+ tempTable);
        String sql = "select * from " + orcTable;
        System.out.println("Running: " + sql);
        ResultSet res = stmt.executeQuery(sql);
        while (res.next()) {
          System.out.println(String.valueOf(res.getInt(1)) + "\t" +   res.getString(2));
        }

在这一行出现错误“stmt.execute("INSERT OVERWRITE TABLE"+ orcTable +" SELECT * FROM "+ tempTable)"

任何人都可以帮我解决这个问题吗?感谢你。

4

0 回答 0