Spaces: Running on Zero

Solved: "Example Error" issue (#2), opened by KingNish

app.py CHANGED
@@ -4,6 +4,9 @@ import random
|
|
4 |
from diffusers import AuraFlowPipeline
|
5 |
import torch
|
6 |
import spaces
|
|
|
|
|
|
|
7 |
|
8 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
9 |
|
@@ -28,37 +31,45 @@ pipe = AuraFlowPipeline.from_pretrained(
|
|
28 |
MAX_SEED = np.iinfo(np.int32).max
|
29 |
MAX_IMAGE_SIZE = 1024
|
30 |
|
|
|
|
|
|
|
|
|
|
|
31 |
@spaces.GPU
|
32 |
-
def infer(prompt, negative_prompt="", seed=42, randomize_seed=False, width=1024, height=1024, guidance_scale=5.0, num_inference_steps=
|
33 |
|
34 |
if randomize_seed:
|
35 |
seed = random.randint(0, MAX_SEED)
|
36 |
|
37 |
generator = torch.Generator().manual_seed(seed)
|
38 |
|
39 |
-
|
40 |
-
|
41 |
-
|
42 |
-
|
43 |
-
|
44 |
-
|
45 |
-
|
46 |
-
|
47 |
-
)
|
48 |
-
|
49 |
-
return image, seed
|
50 |
|
51 |
examples = [
|
52 |
-
"
|
53 |
-
"
|
54 |
-
|
|
|
|
|
55 |
"A delicious ceviche cheesecake slice",
|
|
|
56 |
]
|
57 |
|
|
|
58 |
css="""
|
59 |
#col-container {
|
60 |
margin: 0 auto;
|
61 |
-
max-width:
|
62 |
}
|
63 |
"""
|
64 |
|
@@ -88,13 +99,13 @@ with gr.Blocks(css=css) as demo:
|
|
88 |
|
89 |
run_button = gr.Button("Run", scale=0)
|
90 |
|
91 |
-
result = gr.
|
92 |
|
93 |
with gr.Accordion("Advanced Settings", open=False):
|
94 |
|
95 |
negative_prompt = gr.Text(
|
96 |
label="Negative prompt",
|
97 |
-
|
98 |
placeholder="Enter a negative prompt",
|
99 |
)
|
100 |
|
@@ -149,7 +160,7 @@ with gr.Blocks(css=css) as demo:
|
|
149 |
fn = infer,
|
150 |
inputs = [prompt],
|
151 |
outputs = [result, seed],
|
152 |
-
cache_examples=
|
153 |
)
|
154 |
|
155 |
gr.on(
|
|
|
4 |
from diffusers import AuraFlowPipeline
|
5 |
import torch
|
6 |
import spaces
|
7 |
+
import uuid
|
8 |
+
import random
|
9 |
+
import os
|
10 |
|
11 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
12 |
|
|
|
31 |
MAX_SEED = np.iinfo(np.int32).max
|
32 |
MAX_IMAGE_SIZE = 1024
|
33 |
|
34 |
+
def save_image(img):
    """Write *img* to the current working directory under a unique name.

    A fresh UUID4 is used for the filename so concurrent generations never
    collide. Returns the generated filename (a relative path).
    """
    filename = f"{uuid.uuid4()}.png"
    img.save(filename)
    return filename
39 |
@spaces.GPU
def infer(prompt, negative_prompt="", seed=42, randomize_seed=False, width=1024, height=1024, guidance_scale=5.0, num_inference_steps=30, progress=gr.Progress(track_tqdm=True)):
    """Run the module-level AuraFlow ``pipe`` for *prompt*.

    Returns a tuple ``(image_paths, seed)``: *image_paths* is a list of PNG
    filenames produced via ``save_image`` (suitable for a gr.Gallery), and
    *seed* is the seed actually used — re-drawn when *randomize_seed* is True.
    The unused *progress* argument enables Gradio's tqdm progress tracking.
    """
    # Draw a fresh seed so repeated runs give different images.
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)

    generator = torch.Generator().manual_seed(seed)

    # Identical call to before; the keyword arguments are passed directly
    # instead of being collected into an intermediate options dict.
    images = pipe(
        prompt=prompt,
        negative_prompt=negative_prompt,
        width=width,
        height=height,
        guidance_scale=guidance_scale,
        num_inference_steps=num_inference_steps,
        generator=generator,
    ).images

    return [save_image(img) for img in images], seed
|
|
57 |
|
58 |
# Showcase prompts for the gr.Examples widget.
# NOTE: the diffed version was missing a comma after the "Open Source" entry,
# so Python's implicit string concatenation silently fused it with the
# "Astronaut" prompt into a single garbled example; each prompt is now a
# distinct list element.
examples = [
    "An alien grasping a sign board contain word 'AuraFlow', futuristic, neonpunk, detailed",
    "a cute robot artist painting on an easel, concept art, light colors, cute",
    'A vibrant street wall covered in colorful graffiti, the centerpiece spells "FIRE", in a storm of colors',
    'Digital art, portrait of an anthropomorphic roaring Tiger warrior with full armor, close up in the middle of a battle, behind him there is a banner with the text "Open Source"',
    "Astronaut in a jungle grasping a sign board contain word 'I love SPACE', cold color palette, muted colors, detailed, futuristic",
    "A delicious ceviche cheesecake slice",
    "a ROBOT riding a BLUE horse on Mars, photorealistic",
]
|
67 |
|
68 |
+
|
69 |
css="""
|
70 |
#col-container {
|
71 |
margin: 0 auto;
|
72 |
+
max-width: 600px;
|
73 |
}
|
74 |
"""
|
75 |
|
|
|
99 |
|
100 |
run_button = gr.Button("Run", scale=0)
|
101 |
|
102 |
+
result = gr.Gallery(label="Result", columns=1, show_label=False)
|
103 |
|
104 |
with gr.Accordion("Advanced Settings", open=False):
|
105 |
|
106 |
negative_prompt = gr.Text(
|
107 |
label="Negative prompt",
|
108 |
+
lines=1,
|
109 |
placeholder="Enter a negative prompt",
|
110 |
)
|
111 |
|
|
|
160 |
fn = infer,
|
161 |
inputs = [prompt],
|
162 |
outputs = [result, seed],
|
163 |
+
cache_examples=True
|
164 |
)
|
165 |
|
166 |
gr.on(
|