class LuongAttention(AttentionMechanism):
    """Implements Luong-style (multiplicative) attention scoring.

    This attention has two forms. The first is standard Luong attention,
    as described in:

    Minh-Thang Luong, Hieu Pham, Christopher D. Manning.
    [Effective Approaches to Attention-based Neural Machine Translation.
    EMNLP 2015.](https://arxiv.org/abs/1508.04025)

    The second is the scaled form inspired partly by the normalized form of
    Bahdanau attention.

    To enable the second form, construct the object with parameter
    `scale=True`.
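
    The snippet below is an illustrative sketch of the scoring both forms
    compute, not this class's internal implementation; `luong_score`,
    `query`, `keys`, and `g` are placeholder names, and `keys` is assumed
    to already be projected to `units` dimensions by the memory layer.

        import tensorflow as tf

        def luong_score(query, keys, g=None):
            # query: [batch_size, units]; keys: [batch_size, max_time, units]
            score = tf.squeeze(
                tf.matmul(keys, tf.expand_dims(query, -1)), axis=-1
            )  # multiplicative score, shape [batch_size, max_time]
            if g is not None:
                # Scaled form: g is a single learned scalar.
                score = g * score
            return tf.nn.softmax(score)

    With `g=None` this reduces to the standard dot-product form; supplying a
    learned scalar corresponds to the scaled variant enabled by `scale=True`.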
"""
    @typechecked
    def __init__(
        self,
        units: TensorLike,
        memory: Optional[TensorLike] = None,
        memory_sequence_length: Optional[TensorLike] = None,
        scale: bool = False,
        probability_fn: str = "softmax",
        dtype: AcceptableDTypes = None,
        name: str = "LuongAttention",
        **kwargs,
    ):
        """Construct the LuongAttention mechanism.
        Args:
          units: The depth of the attention mechanism.
me