

I'm using Flink 1.11.2, Hive 2.1.1 and Java 8. I'm trying to run a remote query against Hive by packaging it in a jar and submitting it through Flink's RestClient:

    private static String jar = "/path/Job.jar";

    // Build a packaged program from the job jar and submit it to the remote cluster over REST
    Configuration config = RemoteConfiguration.getConfiguration(host, port);
    PackagedProgram packagedProgram = PackagedProgram.newBuilder()
        .setJarFile(new File(jar))
        .setArguments(arguments)
        .build();
    RestClusterClient<StandaloneClusterId> client =
        new RestClusterClient<StandaloneClusterId>(config, StandaloneClusterId.getInstance());
    JobGraph jobGraph = PackagedProgramUtils.createJobGraph(packagedProgram, config, 1, false);
    client.submitJob(jobGraph).get();
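
For completeness, here is a sketch of the imports the snippet above assumes (Flink 1.11 package layout; RemoteConfiguration is just a helper that builds the Configuration pointing at the remote cluster and is not shown here):

    // Assumed imports for the submission snippet (Flink 1.11)
    import java.io.File;

    import org.apache.flink.client.deployment.StandaloneClusterId;
    import org.apache.flink.client.program.PackagedProgram;
    import org.apache.flink.client.program.PackagedProgramUtils;
    import org.apache.flink.client.program.rest.RestClusterClient;
    import org.apache.flink.configuration.Configuration;
    import org.apache.flink.runtime.jobgraph.JobGraph;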

The job itself (the code packaged in Job.jar) is:

    StreamExecutionEnvironment streamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStreamSource<String> source = streamExecutionEnvironment.fromElements(tableName);
    source
        .map(new MapFunction<String, String>() {
          String hiveConfDir = "hive-conf";
          String hiveCatalogName = "myhive";
          String databaseName = "default";
          String location = "'hdfs:///tmp/location'";

          @Override
          public String map(String tableName) {
            // Register the Hive catalog and switch to the Hive SQL dialect
            HiveCatalog hive = new HiveCatalog(hiveCatalogName, databaseName, hiveConfDir, "2.1.1");
            EnvironmentSettings batchSettings = EnvironmentSettings.newInstance().useBlinkPlanner().build();
            TableEnvironment tableEnv = TableEnvironment.create(batchSettings);
            tableEnv.registerCatalog(hiveCatalogName, hive);
            tableEnv.useCatalog(hiveCatalogName);
            tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);

            // Disable vectorized execution in the Hive configuration
            hive.getHiveConf().set("hive.vectorized.execution.enabled", "false");
            hive.getHiveConf().set("hive.vectorized.execution.reduce.enabled", "false");
            hive.getHiveConf().set("hive.vectorized.execution.reduce.groupby.enabled", "false");

            // Create an ORC table at the given HDFS location
            tableEnv.executeSql("CREATE TABLE " + tableName + "(\n"
                                    + "  test INT,\n"
                                    + "  age INT\n"
                                    + ") STORED AS ORC LOCATION " + location
                                    + " TBLPROPERTIES ('orc.compress'='NONE')");

            return tableName;
          }
        })
        .print();
    streamExecutionEnvironment.execute();
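
And the corresponding imports for the job code (again a sketch of what the snippet needs; HiveCatalog is provided by the flink-connector-hive dependency):

    // Assumed imports for the job code (Flink 1.11)
    import org.apache.flink.api.common.functions.MapFunction;
    import org.apache.flink.streaming.api.datastream.DataStreamSource;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.flink.table.api.EnvironmentSettings;
    import org.apache.flink.table.api.SqlDialect;
    import org.apache.flink.table.api.TableEnvironment;
    import org.apache.flink.table.catalog.hive.HiveCatalog;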

There is only one additional parameter in flink-conf.yaml:

env.java.home: /path/to/JAVA_HOME

When I run it, every now and then I get one of these errors:

java.lang.OutOfMemoryError: Java heap space

or:

MetaException(message:Got exception: java.lang.ClassCastException class [Ljava.lang.Object; cannot be cast to class [Ljava.net.URI; ([Ljava.lang.Object; and [Ljava.net.URI; are in module java.base of loader 'bootstrap'))

Can you explain what is going on?
