Dc-4nderson committed
Commit 7d763f9 · verified · 1 Parent(s): 83a1a3f

Update README.md

Files changed (1):
  1. README.md +3 -1
README.md CHANGED

@@ -70,7 +70,7 @@ Loss steadily decreased during training, and accuracy remained consistently abov
  from transformers import AutoTokenizer, AutoModelForCausalLM
  from peft import PeftModel
 
- base = "mistralai/Mistral-7B-v0.2"
+ base = "mistralai/Mistral-7B-Instruct-v0.2"
  adapter = "Dc-4nderson/transcript_summarizer_model"
 
  tokenizer = AutoTokenizer.from_pretrained(base)
@@ -86,6 +86,8 @@ text = (
  inputs = tokenizer(text, return_tensors="pt")
  outputs = model.generate(**inputs, max_new_tokens=30000)
  print(tokenizer.decode(outputs[0], skip_special_tokens=True))
+
+ ```
  🧾 License
 
  Released under the MIT License — free for research and commercial use with attribution.
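The hunks above show only fragments of the README's inference example: the commit swaps the base model for the Instruct variant of Mistral 7B v0.2 and closes an unterminated code fence. Below is a minimal sketch of how the updated snippet plausibly fits together, assuming the README attaches the LoRA adapter to the base model with PeftModel.from_pretrained; the two model-loading lines and the example prompt are not visible in the diff and are assumptions here, while the base/adapter names and the tokenize, generate, and decode calls come straight from the hunks.

```python
# Sketch of the inference flow implied by the diff. The model-loading lines and
# the example prompt are assumptions; only the base/adapter names and the
# tokenize/generate/decode calls appear in the hunks above.
from transformers import AutoTokenizer, AutoModelForCausalLM
from peft import PeftModel

base = "mistralai/Mistral-7B-Instruct-v0.2"
adapter = "Dc-4nderson/transcript_summarizer_model"

tokenizer = AutoTokenizer.from_pretrained(base)
model = AutoModelForCausalLM.from_pretrained(base)   # assumed: load base weights
model = PeftModel.from_pretrained(model, adapter)    # assumed: attach the LoRA adapter

text = "Summarize the following transcript: ..."     # placeholder prompt, not from the diff
inputs = tokenizer(text, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=30000)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```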