From a500911234394d24e40a913ac9682a3b187395b9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E6=BA=90=E6=96=87=E9=9B=A8?= <41315874+fumiama@users.noreply.github.com>
Date: Tue, 18 Feb 2025 15:17:42 +0900
Subject: [PATCH] fix(gpt): drop deprecated usage of get_max_length()

---
 ChatTTS/model/gpt.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/ChatTTS/model/gpt.py b/ChatTTS/model/gpt.py
index c69184a8b..9ead32f49 100644
--- a/ChatTTS/model/gpt.py
+++ b/ChatTTS/model/gpt.py
@@ -187,7 +187,10 @@ def _prepare_generation_inputs(
             if cache_position is not None
             else past_key_values.get_seq_length()
         )
-        max_cache_length = past_key_values.get_max_length()
+        try:
+            max_cache_length = past_key_values.get_max_cache_shape()
+        except AttributeError:
+            max_cache_length = past_key_values.get_max_length()  # deprecated in transformers 4.48
         cache_length = (
             past_length
             if max_cache_length is None
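
Note: the pattern above can be read in isolation as a try/except compatibility shim: prefer the
newer Cache.get_max_cache_shape() API and fall back to the deprecated get_max_length() on older
transformers releases. A minimal standalone sketch of that pattern follows; the helper name
get_max_cache_length is illustrative and not part of the patch.

    def get_max_cache_length(past_key_values):
        try:
            # newer transformers: Cache objects expose get_max_cache_shape()
            return past_key_values.get_max_cache_shape()
        except AttributeError:
            # older transformers: only the deprecated get_max_length() exists
            return past_key_values.get_max_length()

Catching AttributeError (rather than a bare except) keeps the fallback narrow: it only fires when
the new method is absent, and does not swallow unrelated errors raised inside either call.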