adjusting the model name in the usage instruction #1
by steilgedacht - opened

README.md CHANGED
@@ -62,7 +62,7 @@ from transformers import AutoTokenizer
 import transformers
 import torch
 
-model = "liminerity/
+model = "liminerity/Omningotex-7b-slerp"
 messages = [{"role": "user", "content": "What is a large language model?"}]
 
 tokenizer = AutoTokenizer.from_pretrained(model)
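For reference, a minimal usage sketch with the corrected model id, assuming the README follows the common transformers text-generation pipeline pattern; the chat-template and pipeline calls below are not part of this hunk and are an assumption about the rest of the snippet:

```python
from transformers import AutoTokenizer
import transformers
import torch

# Corrected model id from this change
model = "liminerity/Omningotex-7b-slerp"
messages = [{"role": "user", "content": "What is a large language model?"}]

tokenizer = AutoTokenizer.from_pretrained(model)

# Assumed continuation: format the chat messages and run a text-generation pipeline
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
pipeline = transformers.pipeline(
    "text-generation",
    model=model,
    torch_dtype=torch.float16,
    device_map="auto",
)
outputs = pipeline(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
print(outputs[0]["generated_text"])
```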