[Doc] Fix small typo in Transformers fallback (#14791)

Signed-off-by: Chen Zhang <zhangch99@outlook.com>
This commit is contained in:
Chen Zhang 2025-03-14 11:33:12 +08:00 committed by GitHub
parent 3fb17d26c8
commit 60c872d4b6
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -101,7 +101,7 @@ class MyAttention(nn.Module):
    def forward(self, hidden_states, **kwargs): # <- kwargs are required
        ...
-       attention_interface = attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]
+       attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]
        attn_output, attn_weights = attention_interface(
            self,
            query_states,