- Unable to create a DataContext with the following configuration. I am trying to use a Databricks Spark DataFrame datasource and an in-house database as the store backend defaults.
- I get MissingConfigVariableError exceptions.
- Could someone explain what I am missing?
import great_expectations as ge
import great_expectations.exceptions as ge_exceptions
from great_expectations.data_context.types.base import DataContextConfig, DatasourceConfig, FilesystemStoreBackendDefaults, DatabaseStoreBackendDefaults
from great_expectations.data_context import BaseDataContext
# Datasource definition for a Spark DataFrame source (V3 / Batch Request style:
# execution_engine + data_connectors). The RuntimeDataConnector lets in-memory
# Spark DataFrames be passed in at runtime, distinguished by the listed
# batch_identifiers.
_runtime_connector = {
    "module_name": "great_expectations.datasource.data_connector",
    "class_name": "RuntimeDataConnector",
    "batch_identifiers": [
        "some_key_maybe_pipeline_stage",
        "some_other_key_maybe_run_id",
    ],
}

my_spark_datasource_config = DatasourceConfig(
    class_name="Datasource",
    execution_engine={"class_name": "SparkDFExecutionEngine"},
    data_connectors={"sample_sparkdf_runtime_data_connector": _runtime_connector},
)
# Build the project config entirely in code (no great_expectations.yml file).
#
# FIX: the original used config_version=2, but the datasource above is declared
# in the V3 (Batch Request) style — execution_engine + data_connectors. A
# V3-style datasource requires config_version 3; the version mismatch makes the
# context parse the config with legacy rules, which can surface as the reported
# MissingConfigVariableError.
data_context_config = DataContextConfig(
    config_version=3.0,
    # No plugins directory and no config-variables file: every value is given
    # inline. Note that with config_variables_file_path=None, any ${VAR}
    # placeholder left anywhere in the config raises MissingConfigVariableError,
    # because there is no file to resolve substitutions from.
    plugins_directory=None,
    config_variables_file_path=None,
    datasources={"my_spark_datasource": my_spark_datasource_config},
    # Keep the expectations / validations / evaluation-parameter stores in a
    # database instead of the local filesystem.
    store_backend_defaults=DatabaseStoreBackendDefaults(
        default_credentials={
            # FIX: the SQLAlchemy "drivername" must be a registered dialect
            # name. The Presto dialect registers as "presto" — "PrestoSQL" is
            # not a valid dialect and fails URL construction.
            "drivername": "presto",
            "host": "*****",          # masked placeholder — fill in real host
            "port": "443",
            "username": "*****",
            "password": "*****",
            "database": "****",
        }
    ),
    anonymous_usage_statistics={"enabled": False},
)

# Ephemeral in-memory context constructed from the config above; nothing is
# written to disk at instantiation time.
context = BaseDataContext(project_config=data_context_config)