//! rust_bert/models/pegasus/attention.rs

// Copyright 2021, Google and The HuggingFace Inc. team. All rights reserved.
// Copyright 2021 Guillaume Becquin
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//     http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
12
13use crate::bart::LayerState as BartLayerState;
14
/// # Cache for Pegasus attention layers
/// Stores the cached value of key, value and key padding mask to avoid recalculation (e.g. at each generation step)
///
/// Pegasus shares its attention-cache layout with BART, so rather than duplicating
/// the definition this is a type alias to [`crate::bart::LayerState`]. Any change to
/// the BART cache structure therefore applies to Pegasus automatically.
pub type LayerState = BartLayerState;