[bugfix] fix inductor cache on max_position_embeddings (#15436)

Signed-off-by: youkaichao <youkaichao@gmail.com>
youkaichao 2025-03-25 22:05:39 +08:00 committed by GitHub
parent a608160027
commit d0cfec7ab9


@@ -221,6 +221,9 @@ class ModelConfig:
         factors.append(self.trust_remote_code)
         factors.append(self.rope_scaling)
         factors.append(self.rope_theta)
+        # rope cos/sin cache depends on the max_position_embeddings
+        factors.append(
+            getattr(self.hf_config, "max_position_embeddings", "None"))
         return hashlib.sha256(str(factors).encode()).hexdigest()

     def __init__(
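
Per the commit title, this hash feeds the Inductor compilation cache key, so two models that differ only in max_position_embeddings (which sizes the rope cos/sin cache) no longer share a cached compiled artifact. Below is a minimal, self-contained sketch of the same factor-hash idea; the class and field names (ModelConfigSketch, HFConfigStub) are hypothetical stand-ins, not vLLM's real ModelConfig.

# Minimal sketch: fold every config field that can change compiled-kernel
# behavior into one SHA-256 digest used as a cache-key factor.
import hashlib
from dataclasses import dataclass
from typing import Any, Optional


@dataclass
class HFConfigStub:
    """Stand-in for a Hugging Face config object (hypothetical)."""
    max_position_embeddings: Optional[int] = None


@dataclass
class ModelConfigSketch:
    trust_remote_code: bool
    rope_scaling: Optional[dict]
    rope_theta: Optional[float]
    hf_config: HFConfigStub

    def compute_hash(self) -> str:
        factors: list[Any] = []
        factors.append(self.trust_remote_code)
        factors.append(self.rope_scaling)
        factors.append(self.rope_theta)
        # The rope cos/sin cache is sized by max_position_embeddings, so two
        # models differing only in this field must not share a cache entry.
        factors.append(
            getattr(self.hf_config, "max_position_embeddings", "None"))
        return hashlib.sha256(str(factors).encode()).hexdigest()


# Two configs that differ only in max_position_embeddings now hash differently,
# which is exactly the invalidation behavior this commit adds.
a = ModelConfigSketch(False, None, 10000.0, HFConfigStub(4096))
b = ModelConfigSketch(False, None, 10000.0, HFConfigStub(8192))
assert a.compute_hash() != b.compute_hash()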