I cannot use the predefined pipeline 'recognize_entities_dl' provided by the spark-nlp library.
I have tried installing different combinations of pyspark and spark-nlp versions, without success.
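For context, the installs looked roughly like this (the exact version pins below are only illustrative; I tried several combinations):

pip install pyspark==2.4.3      # also tried other 2.x releases
pip install spark-nlp==2.1.0    # sparknlp.version() reports 2.1.0 below

This is the code that triggers the error: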
import sparknlp
from sparknlp.pretrained import PretrainedPipeline
#create or get Spark Session
spark = sparknlp.start()
sparknlp.version()
spark.version
#download, load, and annotate a text by pre-trained pipeline
pipeline = PretrainedPipeline('recognize_entities_dl', lang='en')
result = pipeline.annotate('Harry Potter is a great movie')
The output and full traceback:

2.1.0
recognize_entities_dl download started this may take some time.
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-13-b71a0f77e93a> in <module>
11 #download, load, and annotate a text by pre-trained pipeline
12
---> 13 pipeline = PretrainedPipeline('recognize_entities_dl', 'en')
14 result = pipeline.annotate('Harry Potter is a great movie')
d:\python36\lib\site-packages\sparknlp\pretrained.py in __init__(self, name, lang, remote_loc)
89
90 def __init__(self, name, lang='en', remote_loc=None):
---> 91 self.model = ResourceDownloader().downloadPipeline(name, lang, remote_loc)
92 self.light_model = LightPipeline(self.model)
93
d:\python36\lib\site-packages\sparknlp\pretrained.py in downloadPipeline(name, language, remote_loc)
50 def downloadPipeline(name, language, remote_loc=None):
51 print(name + " download started this may take some time.")
---> 52 file_size = _internal._GetResourceSize(name, language, remote_loc).apply()
53 if file_size == "-1":
54 print("Can not find the model to download please check the name!")
AttributeError: module 'sparknlp.internal' has no attribute '_GetResourceSize'