Fix README.md
README.md (CHANGED)
@@ -19,6 +19,8 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline, set_seed
 
 model = AutoModelForCausalLM.from_pretrained("sbintuitions/sarashina2-13b", torch_dtype=torch.bfloat16, device_map="auto")
 tokenizer = AutoTokenizer.from_pretrained("sbintuitions/sarashina2-13b")
+# If you want to use slow tokenizer
+# tokenizer = AutoTokenizer.from_pretrained("sbintuitions/sarashina2-13b", use_fast=False)
 generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
 set_seed(123)
 
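For context, the README's usage snippet after this change reads roughly as follows. This is a sketch assembled from the diff hunk above: the `import torch` line and the final generation call are assumptions not shown in this hunk, while the model loading, tokenizer, pipeline, and seed lines are taken verbatim from the diff.

```python
import torch  # assumed: required for torch.bfloat16 below
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline, set_seed

# Load the model in bfloat16 and spread it across available devices automatically
model = AutoModelForCausalLM.from_pretrained("sbintuitions/sarashina2-13b", torch_dtype=torch.bfloat16, device_map="auto")
tokenizer = AutoTokenizer.from_pretrained("sbintuitions/sarashina2-13b")
# If you want to use slow tokenizer
# tokenizer = AutoTokenizer.from_pretrained("sbintuitions/sarashina2-13b", use_fast=False)
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
set_seed(123)

# Illustrative generation call (not part of this hunk); prompt and length are placeholders
print(generator("Hello,", max_new_tokens=30))
```

The commented-out `use_fast=False` line is the substance of this commit: it documents how to fall back to the slow (SentencePiece-based) tokenizer instead of the default fast tokenizer when loading the same checkpoint.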