在尝试对一维向量进行自我关注时,我一遍又一遍地遇到此错误,我真的不明白为什么会发生这种情况,任何帮助将不胜感激。
layer = layers.MultiHeadAttention(num_heads=2, key_dim=2)
target = tf.ones(shape=[1, 16])
source = tf.ones(shape=[1, 16])
output_tensor, weights = layer(target, source)
错误:
~/anaconda3/envs/tf/lib/python3.9/site-packages/tensorflow/python/keras/layers/multi_head_attention.py in _masked_softmax(self, attention_scores, attention_mask)
399 attention_mask = array_ops.expand_dims(
400 attention_mask, axis=mask_expansion_axes)
--> 401 return self._softmax(attention_scores, attention_mask)
402
403 def _compute_attention(self,
~/anaconda3/envs/tf/lib/python3.9/site-packages/tensorflow/python/keras/engine/base_layer.py in __call__(self, *args, **kwargs)
1010 with autocast_variable.enable_auto_cast_variables(
1011 self._compute_dtype_object):
-> 1012 outputs = call_fn(inputs, *args, **kwargs)
1013
1014 if self._activity_regularizer:
~/anaconda3/envs/tf/lib/python3.9/site-packages/tensorflow/python/keras/layers/advanced_activations.py in call(self, inputs, mask)
332 inputs, axis=self.axis, keepdims=True))
333 else:
--> 334 return K.softmax(inputs, axis=self.axis[0])
335 return K.softmax(inputs, axis=self.axis)
336
IndexError: tuple index out of range