The dataset has a single `train` split with the following columns:

| Column | Type | Notes |
|---|---|---|
| image | image | 256 px wide |
| gender | string | 2 classes (only `female` appears in the preview) |
| age | string | 1 class (`adult`) |
| race | string | 5 classes (`Asian`, `Caucasian`, `African American`, and `unknown` appear in the preview) |
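A minimal sketch for reproducing this summary locally (assumes the `datasets` library is installed):

```python
from datasets import load_dataset

ds = load_dataset('famousdetectiveadrianmonk/person-attributes-fewshot')['train']
print(ds.features)              # column names and types
for col in ('gender', 'age', 'race'):
    print(col, ds.unique(col))  # distinct values per column
```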
Usage

This dataset is intended for use with the openbmb/MiniCPM-V-2_6 model. Load the model and tokenizer first:
```python
import json
from itertools import product

import torch
from datasets import load_dataset
from PIL import Image, ImageDraw
from transformers import AutoModel, AutoTokenizer

model = AutoModel.from_pretrained(
    'openbmb/MiniCPM-V-2_6',
    trust_remote_code=True,
    attn_implementation='sdpa',  # sdpa or flash_attention_2, no eager
    torch_dtype=torch.bfloat16,
    low_cpu_mem_usage=True,
)
model = model.eval().cuda()
tokenizer = AutoTokenizer.from_pretrained('openbmb/MiniCPM-V-2_6', trust_remote_code=True)
```
question = """\
Please describe a person based on the following attributes: gender, age, and race.
Ensure that the response is structured according to the following schema:
Person:
- gender: (The person's gender. Options: male, female, unknown.)
- age: (The person's age. Options: child, adult, senior, unknown.)
- race: (The person's race. Options: Caucasian, African American, Asian, Hispanic, Middle Eastern, Native American, unknown.)
Provide the description as a JSON object matching the schema.
"""
Build one few-shot user/assistant pair per attribute combination found in the dataset:

```python
dataset = load_dataset('famousdetectiveadrianmonk/person-attributes-fewshot')['train']

msgs = []
for gender, age, race in product(
    dataset.unique('gender'),
    dataset.unique('age'),
    dataset.unique('race'),
):
    selected = dataset.filter(
        lambda row: all(
            [
                row['gender'] == gender,
                row['age'] == age,
                row['race'] == race,
            ]
        )
    )
    if len(selected) == 0:  # no example for this combination
        continue
    example = selected.shuffle().take(1)[0]  # a single random row, not a one-row Dataset
    answer = json.dumps(dict(
        gender=example['gender'],
        age=example['age'],
        race=example['race'],
    ))
    img = example['image']
    msgs.extend([
        {'role': 'user', 'content': [img, question]},
        {'role': 'assistant', 'content': [answer]},
    ])
del dataset
```
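Each combination that exists in the data contributes one user/assistant pair, so a quick sanity check (a sketch) is:

```python
print(f'{len(msgs) // 2} few-shot examples collected')
```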
Prepare the query image, resizing or cropping it to 256×256 to match the few-shot images:

```python
img = Image.open(...)  # path to your own image
resized_img = img.resize((256, 256))  # or crop, see below

if False:
    # You can test with an example image from the source dataset instead.
    dataset = load_dataset('TryOnVirtual/VITON-HD-Captions')
    example = dataset['train'].shuffle().take(1)[0]
    img = example['image']
    crop_size = min(img.size)
    resized_img = img.crop((0, 0, crop_size, crop_size)).resize((256, 256))
```
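The test branch above crops from the top-left corner; if the subject is centered, a center crop (a variant, not part of the original script) may work better:

```python
# Center-crop to a square, then resize to match the few-shot images.
w, h = img.size
side = min(w, h)
left, top = (w - side) // 2, (h - side) // 2
resized_img = img.crop((left, top, left + side, top + side)).resize((256, 256))
```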
Query the model with the few-shot messages prepended, retrying because the model does not always emit valid JSON:

```python
for _ in range(10):
    try:
        res = model.chat(
            image=None,
            msgs=[*msgs, {'role': 'user', 'content': [resized_img, question]}],
            tokenizer=tokenizer,
        )
        pred = json.loads(res)
        break
    except json.JSONDecodeError:
        continue
else:
    # The for-else branch runs only if no attempt produced valid JSON;
    # raising here avoids a NameError on `pred` below.
    raise RuntimeError('all attempts failed to produce valid JSON')
```
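If the model tends to wrap the JSON in prose or code fences, a more forgiving parser (a hypothetical helper, not part of the original script) could replace the bare `json.loads(res)` call:

```python
import json
import re

def extract_json(text: str) -> dict:
    # Hypothetical helper: parse the first {...} span found in the response.
    match = re.search(r'\{.*\}', text, re.DOTALL)
    if match is None:
        raise json.JSONDecodeError('no JSON object found', text, 0)
    return json.loads(match.group(0))
```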
Finally, overlay the prediction on a copy of the original image:

```python
annotated_image = img.copy()
draw = ImageDraw.Draw(annotated_image)
# Draw the predicted attributes as text on the image.
draw.text(
    (0, 0),
    json.dumps(pred, indent=4),
    font_size=40,  # the font_size argument requires a recent Pillow version
    fill=(255, 0, 0),
)
annotated_image  # displays inline in a notebook
```
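Outside a notebook, save the result instead of relying on implicit display:

```python
annotated_image.save('annotated.png')
```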
Attribution

This dataset includes data originally from the TryOnVirtual/VITON-HD-Captions dataset, which is licensed under the Apache License 2.0. The original dataset is available on the Hugging Face Hub.
```bibtex
@article{yao2024minicpm,
  title={MiniCPM-V: A GPT-4V Level MLLM on Your Phone},
  author={Yao, Yuan and Yu, Tianyu and Zhang, Ao and Wang, Chongyi and Cui, Junbo and Zhu, Hongji and Cai, Tianchi and Li, Haoyu and Zhao, Weilin and He, Zhihui and others},
  journal={arXiv preprint arXiv:2408.01800},
  year={2024}
}
```