AkimfromParis
committed on
Commit
•
b60351d
1
Parent(s):
64f1af2
Update README.md
Browse files
README.md
CHANGED
@@ -45,22 +45,18 @@ dtype: bfloat16
|
|
45 |
```python
|
46 |
# !pip install -qU transformers accelerate
|
47 |
|
48 |
-
from transformers import AutoTokenizer, AutoModelForCausalLM
|
49 |
-
from transformers import pipeline
|
50 |
import torch
|
51 |
|
52 |
model_name = "AkimfromParis/Heliotrope-Ely-Swa-slerp-7B"
|
53 |
|
54 |
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
55 |
-
model = AutoModelForCausalLM.from_pretrained(model_name
|
56 |
|
57 |
-
pipe = pipeline("text-generation",
|
|
|
58 |
|
59 |
-
|
60 |
-
{"role": "system", "content": "あなたは誠実で優秀な日本人のアシスタントです。以下のトピックに関する詳細な情報を提供してください。"},
|
61 |
-
{"role": "user", "content": "大谷翔平選手は誰ですか?"},
|
62 |
-
]
|
63 |
-
print(pipe(messages, max_new_tokens=256)[0]['generated_text'][-1])
|
64 |
```
|
65 |
|
66 |
# 🔖 Citation
|
|
|
45 |
```python
|
46 |
# !pip install -qU transformers accelerate

# Load the tokenizer, model, and text-generation pipeline for the merged model,
# then run a short greedy (do_sample=False) completion on a Japanese prompt.
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import torch

model_name = "AkimfromParis/Heliotrope-Ely-Swa-slerp-7B"

tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# float16 halves memory vs. the checkpoint's bfloat16 training dtype;
# device_map="auto" lets accelerate place weights on available devices.
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, torch_dtype=torch.float16, device_map="auto")
sequences = pipe('大谷翔平選手は', do_sample=False, max_new_tokens=100)

print(sequences[0].get("generated_text"))
|
|
|
|
|
|
|
|
|
60 |
```
|
61 |
|
62 |
# 🔖 Citation
|