22

我尝试在 TensorFlow 2.0 (alpha) 中运行此代码:

import tensorflow_hub as hub

@tf.function
def elmo(texts):
    elmo_module = hub.Module("https://tfhub.dev/google/elmo/2", trainable=True)
    return elmo_module(texts, signature="default", as_dict=True)

embeds = elmo(tf.constant(["the cat is on the mat",
                           "dogs are in the fog"]))

但我得到了这个错误:

---------------------------------------------------------------------------
RuntimeError                              Traceback (most recent call last)
<ipython-input-1-c7f14c7ed0e9> in <module>
      9
     10 elmo(tf.constant(["the cat is on the mat",
---> 11                   "dogs are in the fog"]))

.../tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
    417     # This is the first call of __call__, so we have to initialize.
    418     initializer_map = {}
--> 419     self._initialize(args, kwds, add_initializers_to=initializer_map)
    420     if self._created_variables:
    421       try:

.../tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
    361     self._concrete_stateful_fn = (
    362         self._stateful_fn._get_concrete_function_internal_garbage_collected(  # pylint: disable=protected-access
--> 363             *args, **kwds))
    364
    365     def invalid_creator_scope(*unused_args, **unused_kwds):

.../tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
   1322     if self.input_signature:
   1323       args, kwargs = None, None
-> 1324     graph_function, _, _ = self._maybe_define_function(args, kwargs)
   1325     return graph_function
   1326

.../tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
   1585           or call_context_key not in self._function_cache.missed):
   1586         self._function_cache.missed.add(call_context_key)
-> 1587         graph_function = self._create_graph_function(args, kwargs)
   1588         self._function_cache.primary[cache_key] = graph_function
   1589         return graph_function, args, kwargs

.../tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
   1518             arg_names=arg_names,
   1519             override_flat_arg_shapes=override_flat_arg_shapes,
-> 1520             capture_by_value=self._capture_by_value),
   1521         self._function_attributes)
   1522

.../tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
    705                                           converted_func)
    706
--> 707       func_outputs = python_func(*func_args, **func_kwargs)
    708
    709       # invariant: `func_outputs` contains only Tensors, IndexedSlices,

.../tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
    314         # __wrapped__ allows AutoGraph to swap in a converted function. We give
    315         # the function a weak reference to itself to avoid a reference cycle.
--> 316         return weak_wrapped_fn().__wrapped__(*args, **kwds)
    317     weak_wrapped_fn = weakref.ref(wrapped_fn)
    318

.../tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
    697                   optional_features=autograph_options,
    698                   force_conversion=True,
--> 699               ), args, kwargs)
    700
    701         # Wrapping around a decorator allows checks like tf_inspect.getargspec

.../tensorflow/python/autograph/impl/api.py in converted_call(f, owner, options, args, kwargs)
    355
    356   if kwargs is not None:
--> 357     result = converted_f(*effective_args, **kwargs)
    358   else:
    359     result = converted_f(*effective_args)

/var/folders/wy/h39t6kb11pnbb0pzhksd_fqh0000gn/T/tmp4v3g2d_1.py in tf__elmo(texts)
     11       retval_ = None
     12       print('Eager:', ag__.converted_call('executing_eagerly', tf, ag__.ConversionOptions(recursive=True, force_conversion=False, optional_features=(), internal_convert_user_code=True), (), None))
---> 13       elmo_module = ag__.converted_call('Module', hub, ag__.ConversionOptions(recursive=True, force_conversion=False, optional_features=(), internal_convert_user_code=True), ('https://tfhub.dev/google/elmo/2',), {'trainable': True})
     14       do_return = True
     15       retval_ = ag__.converted_call(elmo_module, None, ag__.ConversionOptions(recursive=True, force_conversion=False, optional_features=(), internal_convert_user_code=True), (texts,), {'signature': 'default', 'as_dict': True})

.../tensorflow/python/autograph/impl/api.py in converted_call(f, owner, options, args, kwargs)
    252   if tf_inspect.isclass(f):
    253     logging.log(2, 'Permanently whitelisted: %s: constructor', f)
--> 254     return _call_unconverted(f, args, kwargs)
    255
    256   # Other built-in modules are permanently whitelisted.

.../tensorflow/python/autograph/impl/api.py in _call_unconverted(f, args, kwargs)
    174
    175   if kwargs is not None:
--> 176     return f(*args, **kwargs)
    177   else:
    178     return f(*args)

.../tensorflow_hub/module.py in __init__(self, spec, trainable, name, tags)
    167           name=self._name,
    168           trainable=self._trainable,
--> 169           tags=self._tags)
    170       # pylint: enable=protected-access
    171

.../tensorflow_hub/native_module.py in _create_impl(self, name, trainable, tags)
    338         trainable=trainable,
    339         checkpoint_path=self._checkpoint_variables_path,
--> 340         name=name)
    341
    342   def _export(self, path, variables_saver):

.../tensorflow_hub/native_module.py in __init__(self, spec, meta_graph, trainable, checkpoint_path, name)
    389     # TPU training code.
    390     with tf.init_scope():
--> 391       self._init_state(name)
    392
    393   def _init_state(self, name):

.../tensorflow_hub/native_module.py in _init_state(self, name)
    392
    393   def _init_state(self, name):
--> 394     variable_tensor_map, self._state_map = self._create_state_graph(name)
    395     self._variable_map = recover_partitioned_variable_map(
    396         get_node_map_from_tensor_map(variable_tensor_map))

.../tensorflow_hub/native_module.py in _create_state_graph(self, name)
    449         meta_graph,
    450         input_map={},
--> 451         import_scope=relative_scope_name)
    452
    453     # Build a list from the variable name in the module definition to the actual

.../tensorflow/python/training/saver.py in import_meta_graph(meta_graph_or_file, clear_devices, import_scope, **kwargs)
   1443   """  # pylint: disable=g-doc-exception
   1444   return _import_meta_graph_with_return_elements(
-> 1445       meta_graph_or_file, clear_devices, import_scope, **kwargs)[0]
   1446
   1447

.../tensorflow/python/training/saver.py in _import_meta_graph_with_return_elements(meta_graph_or_file, clear_devices, import_scope, return_elements, **kwargs)
   1451   """Import MetaGraph, and return both a saver and returned elements."""
   1452   if context.executing_eagerly():
-> 1453     raise RuntimeError("Exporting/importing meta graphs is not supported when "
   1454                        "eager execution is enabled. No graph exists when eager "
   1455                        "execution is enabled.")

RuntimeError: Exporting/importing meta graphs is not supported when eager execution is enabled. No graph exists when eager execution is enabled.
4

3 回答 3

27

在 TensorFlow 2.0 中,您应该使用 hub.load() 或 hub.KerasLayer()

[2019 年 4 月] - 目前只有 TensorFlow 2.0 格式的模块可以通过它们加载。将来,许多 1.x 的 Hub 模块也应该可以加载。

对于仅支持 2.x 的模块,您可以在为该模块创建的笔记本中查看示例。

于 2019-04-09T15:41:37.520 回答
7

下面这种加载方式可以在 TensorFlow 2 中使用:

embed = hub.load("https://tfhub.dev/google/universal-sentence-encoder-large/3")

代替

embed = hub.Module("https://tfhub.dev/google/universal-sentence-encoder-large/3")

后者(hub.Module)在 TF2 中已不受支持,请改用 hub.load()。

于 2020-04-07T23:12:43.757 回答
5

2021 年 1 月

要使用来自 TF Hub 的模型,包括 ELMO,例如,使用 tensorflow 2.x 在本地加载和解压模型:

cd ~/tfhub/elmo3
model_link='https://tfhub.dev/google/elmo/3'
model_link=$model_link'?tf-hub-format=compressed'
wget $model_link -O model
tar xvzf model
rm model

然后使用 hub.load():

import tensorflow as tf
import tensorflow_hub as hub
elmo = hub.load("/home/user/tfhub/elmo3")

embeddings = elmo.signatures["default"](tf.constant([
                "i like green eggs and ham",
                "i like green ham and eggs"
                ])
                )["elmo"]

此函数可以处理已弃用的 TF1 Hub 格式

于 2020-08-27T15:36:14.693 回答