How do I add a partition spec to an existing, unpartitioned Iceberg table? The table already has data loaded into it.
The table was created as follows:
import org.apache.iceberg.hive.HiveCatalog
import org.apache.iceberg.catalog._
import org.apache.iceberg.spark.SparkSchemaUtil
import org.apache.iceberg.PartitionSpec
import org.apache.spark.sql.SaveMode._
import org.apache.spark.sql.functions.lit

// Sample data: 1000 rows with a constant "level" column
val df1 = spark
  .range(1000)
  .toDF
  .withColumn("level", lit("something"))

// Create the table in the Hive catalog with an unpartitioned spec
val catalog = new HiveCatalog(spark.sessionState.newHadoopConf())
val icebergSchema = SparkSchemaUtil.convert(df1.schema)
val icebergTableName = TableIdentifier.of("default", "icebergTab")
val icebergTable = catalog
  .createTable(icebergTableName, icebergSchema, PartitionSpec.unpartitioned)
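The data was then appended with the Spark DataFrame writer, along these lines (a minimal sketch only; I'm assuming the name-based format("iceberg") writer here, the real load job may differ):

// Sketch of the load step (assumption: Spark DataFrame writer by table name)
df1.write
  .format("iceberg")
  .mode(Append)                 // SaveMode.Append, from the import above
  .save("default.icebergTab")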
Any suggestions?
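For reference, this is the kind of in-place spec evolution I was hoping exists, a sketch only; I'm assuming the Table.updateSpec() API here (Iceberg 0.11+) and don't know whether it is the right approach for a table that already contains data:

// Assumption: UpdatePartitionSpec via Table.updateSpec(), Iceberg 0.11+
icebergTable
  .updateSpec()
  .addField("level")   // partition by the existing "level" column
  .commit()

If something like this only affects newly written files, a pointer on how to rewrite the existing rows into the new partition layout would also help.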