add: downscale slider
- app.py +1 -0
- pixelization.py +3 -2
app.py
CHANGED
@@ -51,6 +51,7 @@ def main():
     gr.Interface(m.pixelize_modified,
                  [
                      gr.components.Image(type='pil', label='Input'),
+                     gr.components.Slider(minimum=1, maximum=8, value=1, step=1, label='Downscale Input'),
                      gr.components.Slider(minimum=1, maximum=16, value=4, step=1, label='Pixel Size'),
                      gr.components.Checkbox(True, label="Upscale after")
                  ],
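The new `Downscale Input` slider is listed between the Image input and the `Pixel Size` slider so that Gradio's positional mapping lines up with the updated `pixelize_modified(self, in_img, downscale, pixel_size, upscale_after)` signature. A minimal standalone sketch of that wiring (the output component and the stub function are illustrative assumptions; the hunk does not show them):

import gradio as gr

def pixelize_modified_stub(in_img, downscale, pixel_size, upscale_after):
    # Stand-in for Model.pixelize_modified: Gradio passes the components
    # listed below to this function positionally, in the order given.
    return in_img

gr.Interface(pixelize_modified_stub,
             [
                 gr.components.Image(type='pil', label='Input'),
                 gr.components.Slider(minimum=1, maximum=8, value=1, step=1, label='Downscale Input'),
                 gr.components.Slider(minimum=1, maximum=16, value=4, step=1, label='Pixel Size'),
                 gr.components.Checkbox(True, label="Upscale after"),
             ],
             gr.components.Image(type='pil', label='Output')).launch()
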
pixelization.py
CHANGED
@@ -41,14 +41,15 @@ class Model():
 
         save(out_t, out_img)
 
-    def pixelize_modified(self, in_img, pixel_size, upscale_after) -> Image.Image:
+    def pixelize_modified(self, in_img, downscale, pixel_size, upscale_after) -> Image.Image:
         with torch.no_grad():
             in_img = in_img.convert('RGB')
 
             # limit in_img size to 1024x1024 so it didn't destroyed by large image
             if in_img.size[0] > 1024 or in_img.size[1] > 1024:
                 in_img.thumbnail((1024, 1024), Image.NEAREST)
-
+            if downscale>1:
+                in_img.resize((in_img.size[0]//downscale, in_img.size[1]//downscale))
             in_img.resize((in_img.size[0] * 4 // pixel_size, in_img.size[1] * 4 // pixel_size))
 
             in_t = process(in_img).to(self.device)
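Note that `PIL.Image.resize` returns a new image rather than resizing in place, so the two `in_img.resize(...)` calls in the hunk above discard their results. A minimal sketch of the downscale step with the return values assigned back (a suggested correction, not part of this commit):

from PIL import Image

def downscale_then_rescale(in_img: Image.Image, downscale: int, pixel_size: int) -> Image.Image:
    # Mirrors the logic in the hunk above, but keeps the images returned by
    # resize(), since PIL does not modify the original image in place.
    if downscale > 1:
        in_img = in_img.resize((in_img.size[0] // downscale, in_img.size[1] // downscale))
    in_img = in_img.resize((in_img.size[0] * 4 // pixel_size, in_img.size[1] * 4 // pixel_size))
    return in_img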