transformers (#3)
fix transformers 4.45 compatibility (commit 3e081f8b6b1cfdbde4c39e4efd2b84057021e5d7)
Co-authored-by: Ella Charlaix <[email protected]>
- tokenization_chatglm.py +2 -1
tokenization_chatglm.py
CHANGED
|
@@ -265,6 +265,7 @@ class ChatGLM4Tokenizer(PreTrainedTokenizer):
|
|
| 265 |
max_length: Optional[int] = None,
|
| 266 |
padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD,
|
| 267 |
pad_to_multiple_of: Optional[int] = None,
|
|
|
|
| 268 |
return_attention_mask: Optional[bool] = None,
|
| 269 |
) -> dict:
|
| 270 |
"""
|
|
@@ -292,7 +293,7 @@ class ChatGLM4Tokenizer(PreTrainedTokenizer):
|
|
| 292 |
"""
|
| 293 |
# Load from model defaults
|
| 294 |
assert self.padding_side == "left"
|
| 295 |
-
|
| 296 |
required_input = encoded_inputs[self.model_input_names[0]]
|
| 297 |
seq_length = len(required_input)
|
| 298 |
|
|
|
|
| 265 |
max_length: Optional[int] = None,
|
| 266 |
padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD,
|
| 267 |
pad_to_multiple_of: Optional[int] = None,
|
| 268 |
+
padding_side: Optional[bool] = None,
|
| 269 |
return_attention_mask: Optional[bool] = None,
|
| 270 |
) -> dict:
|
| 271 |
"""
|
|
|
|
| 293 |
"""
|
| 294 |
# Load from model defaults
|
| 295 |
assert self.padding_side == "left"
|
| 296 |
+
assert padding_side or "left" == "left"
|
| 297 |
required_input = encoded_inputs[self.model_input_names[0]]
|
| 298 |
seq_length = len(required_input)
|
| 299 |
|