import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the tokenizer and model from a local Llama-2-7b checkpoint,
# keeping the weights in bfloat16 to halve memory use versus float32.
tokenizer = AutoTokenizer.from_pretrained("/export/share/txie/llama2/Llama-2-7b-hf")
model = AutoModelForCausalLM.from_pretrained(
    "/export/share/txie/llama2/Llama-2-7b-hf",
    torch_dtype=torch.bfloat16,
)

# Tokenize a prompt and generate a continuation; max_length=128 caps the
# total sequence length, prompt tokens included.
inputs = tokenizer("The world is", return_tensors="pt")
sample = model.generate(**inputs, max_length=128)
print(tokenizer.decode(sample[0]))
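
# A minimal sketch of the same generation call with GPU placement and
# sampling, not part of the original script. It assumes a CUDA device and
# the `accelerate` package are available; the sampling hyperparameters
# (temperature, top_p) are illustrative values, not ones from the source.
model = AutoModelForCausalLM.from_pretrained(
    "/export/share/txie/llama2/Llama-2-7b-hf",
    torch_dtype=torch.bfloat16,
    device_map="auto",  # requires `accelerate`; places weights on the GPU
)
inputs = tokenizer("The world is", return_tensors="pt").to(model.device)
sample = model.generate(
    **inputs,
    max_new_tokens=128,  # counts only generated tokens, unlike max_length
    do_sample=True,      # sample from the distribution instead of greedy decoding
    temperature=0.7,     # illustrative sampling settings
    top_p=0.9,
)
print(tokenizer.decode(sample[0], skip_special_tokens=True))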