[bugfix] fix inductor cache on max_position_embeddings (#15436)
Signed-off-by: youkaichao <youkaichao@gmail.com>
parent a608160027
commit d0cfec7ab9
@@ -221,6 +221,9 @@ class ModelConfig:
         factors.append(self.trust_remote_code)
         factors.append(self.rope_scaling)
         factors.append(self.rope_theta)
+        # rope cos/sin cache depends on the max_position_embeddings
+        factors.append(
+            getattr(self.hf_config, "max_position_embeddings", "None"))
         return hashlib.sha256(str(factors).encode()).hexdigest()

    def __init__(
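The fix works because the hash returned by compute_hash serves as the cache key for inductor-compiled artifacts: any config field that affects the compiled graph must be folded into the factors list, or a stale artifact will be reused. Below is a minimal, self-contained sketch of this pattern; ModelConfigSketch and its reduced field set are hypothetical stand-ins, not vLLM's actual ModelConfig, which reads max_position_embeddings from the HF config object via getattr.

import hashlib


class ModelConfigSketch:
    """Hypothetical reduced config illustrating the cache-key pattern."""

    def __init__(self, trust_remote_code, rope_scaling, rope_theta,
                 max_position_embeddings):
        self.trust_remote_code = trust_remote_code
        self.rope_scaling = rope_scaling
        self.rope_theta = rope_theta
        self.max_position_embeddings = max_position_embeddings

    def compute_hash(self) -> str:
        factors = []
        factors.append(self.trust_remote_code)
        factors.append(self.rope_scaling)
        factors.append(self.rope_theta)
        # Without this factor, two models differing only in
        # max_position_embeddings would hash identically, so an
        # inductor artifact whose rope cos/sin cache was sized for
        # one value could be wrongly reused for the other.
        factors.append(self.max_position_embeddings)
        return hashlib.sha256(str(factors).encode()).hexdigest()


# Configs that differ only in max_position_embeddings now get
# distinct cache keys, forcing a fresh compilation.
a = ModelConfigSketch(False, None, 10000.0, 4096)
b = ModelConfigSketch(False, None, 10000.0, 32768)
assert a.compute_hash() != b.compute_hash()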