virendravaishnav
committed on
Commit
•
7fee682
1
Parent(s):
1229304
Updated with OCR model and Gradio integration
Browse files
app.py
CHANGED
@@ -1,11 +1,12 @@
|
|
1 |
import gradio as gr
|
2 |
-
from transformers import
|
3 |
import torch
|
4 |
|
5 |
repo_id = "OpenGVLab/InternVL2-1B"
|
6 |
|
7 |
-
# Load the processor and model directly from the Hub
|
8 |
-
|
|
|
9 |
model = AutoModel.from_pretrained(
|
10 |
repo_id,
|
11 |
trust_remote_code=True,
|
@@ -22,9 +23,9 @@ def analyze_image(image):
|
|
22 |
text = "describe this image"
|
23 |
|
24 |
# Process the image
|
25 |
-
image_inputs =
|
26 |
# Process the text
|
27 |
-
text_inputs =
|
28 |
|
29 |
# Combine the inputs
|
30 |
inputs = {
|
@@ -37,7 +38,7 @@ def analyze_image(image):
|
|
37 |
outputs = model.generate(**inputs)
|
38 |
|
39 |
# Decode the outputs
|
40 |
-
generated_text =
|
41 |
return generated_text
|
42 |
except Exception as e:
|
43 |
return f"An error occurred: {str(e)}"
|
|
|
1 |
import gradio as gr
|
2 |
+
from transformers import AutoImageProcessor, AutoTokenizer, AutoModel
|
3 |
import torch
|
4 |
|
5 |
repo_id = "OpenGVLab/InternVL2-1B"
|
6 |
|
7 |
+
# Load the image processor, tokenizer, and model directly from the Hub
|
8 |
+
image_processor = AutoImageProcessor.from_pretrained(repo_id, trust_remote_code=True)
|
9 |
+
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)
|
10 |
model = AutoModel.from_pretrained(
|
11 |
repo_id,
|
12 |
trust_remote_code=True,
|
|
|
23 |
text = "describe this image"
|
24 |
|
25 |
# Process the image
|
26 |
+
image_inputs = image_processor(images=img, return_tensors="pt").to(device)
|
27 |
# Process the text
|
28 |
+
text_inputs = tokenizer(text, return_tensors="pt").to(device)
|
29 |
|
30 |
# Combine the inputs
|
31 |
inputs = {
|
|
|
38 |
outputs = model.generate(**inputs)
|
39 |
|
40 |
# Decode the outputs
|
41 |
+
generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
|
42 |
return generated_text
|
43 |
except Exception as e:
|
44 |
return f"An error occurred: {str(e)}"
|