我正在尝试使用 Terraform 在 Databricks 工作区中自动化地创建和配置 Databricks 集群。
我们有一组预定义的库,需要在创建集群时安装——总共大约有 20 个库。
# Library kinds that can be installed on the cluster.
variable "libraries" {
  type    = list(string)
  default = ["pypi", "jar"]
}

# PyPI packages to install when the cluster is created.
variable "pythonpackages" {
  type    = list(string)
  default = ["conda", "pymongo"]
}

# JAR coordinates to install when the cluster is created.
# NOTE(review): "etc1" appears twice — possibly meant "etc2"; confirm.
variable "jarpackages" {
  type    = list(string)
  default = ["etc1", "etc1"]
}

# Package repositories to resolve libraries from.
variable "repositories" {
  type    = list(string)
  default = ["internal1", "internal2"]
}
resource "databricks_cluster" "shared_autoscaling" {
  cluster_name            = "Shared Autoscaling"
  spark_version           = data.databricks_spark_version.latest_lts.id
  node_type_id            = data.databricks_node_type.smallest.id
  autotermination_minutes = 20

  autoscale {
    min_workers = 1
    max_workers = 50
  }

  # One dynamic block replaces the three hand-written "library" stanzas.
  # The map key is the PyPI package name; the value is an optional
  # repository URL. A null value makes Terraform omit the "repo"
  # argument entirely, matching the original blocks that did not set it.
  dynamic "library" {
    for_each = {
      dnspython = null
      condo     = null
      kedro     = "artifactory-url"
    }
    content {
      pypi {
        package = library.key
        repo    = library.value
      }
    }
  }
}
我正在尝试消除 library 块的重复配置。我尝试过在 library 块上
使用 for_each,但没有成功。
下面是我想要达到的目标:
# Library kinds that may be attached to the cluster.
variable "libraries" {
  default = ["pypi", "jar"]
}

# Python packages to be installed at cluster creation time.
variable "pythonpackages" {
  default = ["conda", "pymongo"]
}

# JAR packages to be installed at cluster creation time.
variable "jarpackages" {
  default = ["etc1", "etc1"]
}

# Repositories used to resolve the packages above.
variable "repositories" {
  default = ["internal1", "internal2"]
}
# NOTE(review): this block expresses the DESIRED shape, not valid HCL —
# the databricks_cluster pypi "package" argument accepts a single string,
# so assigning a list to it will fail validation. The supported way to
# achieve this is a dynamic "library" block iterating over the package
# list, emitting one library { pypi { ... } } stanza per element.
resource "databricks_cluster" "shared_autoscaling" {
cluster_name = "Shared Autoscaling"
spark_version = data.databricks_spark_version.latest_lts.id
node_type_id = data.databricks_node_type.smallest.id
autotermination_minutes = 20
autoscale {
min_workers = 1
max_workers = 50
}
library {
pypi {
# Invalid as written: "package" takes one string, not a list.
package = ["condo","pymongo","listofpackages"]
}
}
}