nielsr committed
Commit f60dcfe
1 Parent(s): ddcacbc

Update README.md

Files changed (1)
  1. README.md +3 -2
README.md CHANGED

@@ -52,6 +52,7 @@ You can load the model and perform inference as follows:
  ```python
  from transformers import FuyuProcessor, FuyuForCausalLM
  from PIL import Image
+ import requests

  # load model and processor
  model_id = "adept/fuyu-8b"
@@ -60,8 +61,8 @@ model = FuyuForCausalLM.from_pretrained(model_id, device_map="cuda:0")

  # prepare inputs for the model
  text_prompt = "Generate a coco-style caption.\n"
- image_path = "bus.png" # https://huggingface.co/adept-hf-collab/fuyu-8b/blob/main/bus.png
- image = Image.open(image_path)
+ url = "https://huggingface.co/adept/fuyu-8b/resolve/main/bus.png"
+ image = Image.open(requests.get(url, stream=True).raw)

  inputs = processor(text=text_prompt, images=image, return_tensors="pt")
  for k, v in inputs.items():
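
For reference, below is a minimal sketch of how the snippet reads once this change is applied. The imports, prompt, image download, and `processor(...)` call come from the hunks above; the processor loading, the body of the device-move loop, and the final generate/decode calls are assumptions added here to make the example self-contained and are not part of this commit.

```python
from transformers import FuyuProcessor, FuyuForCausalLM
from PIL import Image
import requests

# load model and processor
model_id = "adept/fuyu-8b"
processor = FuyuProcessor.from_pretrained(model_id)  # assumed: standard processor loading, not shown in the hunk
model = FuyuForCausalLM.from_pretrained(model_id, device_map="cuda:0")

# prepare inputs for the model
text_prompt = "Generate a coco-style caption.\n"
url = "https://huggingface.co/adept/fuyu-8b/resolve/main/bus.png"
image = Image.open(requests.get(url, stream=True).raw)  # download the sample image instead of reading a local file

inputs = processor(text=text_prompt, images=image, return_tensors="pt")
for k, v in inputs.items():
    inputs[k] = v.to("cuda:0")  # assumed loop body: move input tensors to the model's device

# assumed continuation: generate a short caption and decode the output sequence
generation_output = model.generate(**inputs, max_new_tokens=8)
generation_text = processor.batch_decode(generation_output, skip_special_tokens=True)
print(generation_text)
```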