我正在创建一个用于与 azure 存储 blob 通信的类,它工作正常,但是如果我尝试将这个类放入包中,它会给我一个错误"error: not found: value dbutils"。如果我删除班级上方的“package Libraries.Custom” ,它工作正常。
我正在使用 Scala 在 azure databricks notebook 中创建一个类。谁能帮帮我。代码如下:
班级代码
package Libraries.Custom
import org.apache.spark.sql._
class BlobContext {
// Basic Azure Storage Configurations
val blob_account_name = "REPLACE_BY_ACTUAL_VALUE"
val blob_account_access_key = "REPLACE_BY_ACTUAL_VALUE"
val blob_container_name = "REPLACE_BY_ACTUAL_VALUE"
val blob_server = s"REPLACE_BY_ACTUAL_VALUE"
val blob_wasbs = s"REPLACE_BY_ACTUAL_VALUE"
val spark = SparkSession
.builder()
.appName("Path SOM")
.master("local[*]")
.config("spark.ui.enabled", "false")
.getOrCreate()
def SetConfigurations {
spark.conf.set(blob_server, blob_account_access_key)
}
def ReadData(fileName: String, fileType: String): DataFrame = {
SetConfigurations
val dataFrame = spark.read.format(fileType).option("header", "true").load(s"${blob_wasbs}${fileName}.${fileType}")
return dataFrame
}
def WriteData(fileDataFrame: DataFrame, fileName: String, fileType: String) {
val absTempFilePath = s"${blob_wasbs}SPARK_NEW_${fileName}.temp"
val absFilePath = s"${blob_wasbs}SPARK_NEW_${fileName}.${fileType}"
fileDataFrame.repartition(1).write.format(fileType).mode("overwrite").option("header", "true").option("inferSchema", "true").option("delimiter", ",").csv(absTempFilePath)
val partition_path = dbutils.fs.ls(absTempFilePath + "/").filter(file=>file.name.endsWith(".csv"))(0).path
dbutils.fs.cp(partition_path, absFilePath)
dbutils.fs.rm(absTempFilePath,recurse=true)
}
}
错误
<notebook>:37: error: not found: value dbutils
val partition_path = dbutils.fs.ls(absTempFilePath + "/").filter(file=>file.name.endsWith(".csv"))(0).path
^
<notebook>:38: error: not found: value dbutils
dbutils.fs.cp(partition_path, absFilePath)
^
<notebook>:39: error: not found: value dbutils
dbutils.fs.rm(absTempFilePath,recurse=true)
^
<notebook>:39: error: not found: value recurse
dbutils.fs.rm(absTempFilePath,recurse=true)
^
Compilation failed.