Fabrice-TIERCELIN committed
Commit bd2b3d4 • 1 Parent(s): d53d32d
Delete clipseg/datasets/utils.py
Browse files
clipseg/datasets/utils.py +0 -68
clipseg/datasets/utils.py
DELETED
@@ -1,68 +0,0 @@

import numpy as np
import torch


def blend_image_segmentation(img, seg, mode, image_size=224):


    if mode in {'blur_highlight', 'blur3_highlight', 'blur3_highlight01', 'blur_highlight_random', 'crop'}:
        if isinstance(img, np.ndarray):
            img = torch.from_numpy(img)

        if isinstance(seg, np.ndarray):
            seg = torch.from_numpy(seg)

    if mode == 'overlay':
        out = img * seg
        out = [out.astype('float32')]
    elif mode == 'highlight':
        out = img * seg[None, :, :] * 0.85 + 0.15 * img
        out = [out.astype('float32')]
    elif mode == 'highlight2':
        img = img / 2
        out = (img+0.1) * seg[None, :, :] + 0.3 * img
        out = [out.astype('float32')]
    elif mode == 'blur_highlight':
        from evaluation_utils import img_preprocess
        out = [img_preprocess((None, [img], [seg]), blur=1, bg_fac=0.5).numpy()[0] - 0.01]
    elif mode == 'blur3_highlight':
        from evaluation_utils import img_preprocess
        out = [img_preprocess((None, [img], [seg]), blur=3, bg_fac=0.5).numpy()[0] - 0.01]
    elif mode == 'blur3_highlight01':
        from evaluation_utils import img_preprocess
        out = [img_preprocess((None, [img], [seg]), blur=3, bg_fac=0.1).numpy()[0] - 0.01]
    elif mode == 'blur_highlight_random':
        from evaluation_utils import img_preprocess
        out = [img_preprocess((None, [img], [seg]), blur=0 + torch.randint(0, 3, (1,)).item(), bg_fac=0.1 + 0.8*torch.rand(1).item()).numpy()[0] - 0.01]
    elif mode == 'crop':
        from evaluation_utils import img_preprocess
        out = [img_preprocess((None, [img], [seg]), blur=1, center_context=0.1, image_size=image_size)[0].numpy()]
    elif mode == 'crop_blur_highlight':
        from evaluation_utils import img_preprocess
        out = [img_preprocess((None, [img], [seg]), blur=3, center_context=0.1, bg_fac=0.1, image_size=image_size)[0].numpy()]
    elif mode == 'crop_blur_highlight352':
        from evaluation_utils import img_preprocess
        out = [img_preprocess((None, [img], [seg]), blur=3, center_context=0.1, bg_fac=0.1, image_size=352)[0].numpy()]
    elif mode == 'shape':
        out = [np.stack([seg[:, :]]*3).astype('float32')]
    elif mode == 'concat':
        out = [np.concatenate([img, seg[None, :, :]]).astype('float32')]
    elif mode == 'image_only':
        out = [img.astype('float32')]
    elif mode == 'image_black':
        out = [img.astype('float32')*0]
    elif mode is None:
        out = [img.astype('float32')]
    elif mode == 'separate':
        out = [img.astype('float32'), seg.astype('int64')]
    elif mode == 'separate_img_black':
        out = [img.astype('float32')*0, seg.astype('int64')]
    elif mode == 'separate_seg_ones':
        out = [img.astype('float32'), np.ones_like(seg).astype('int64')]
    elif mode == 'separate_both_black':
        out = [img.astype('float32')*0, seg.astype('int64')*0]
    else:
        raise ValueError(f'invalid mode: {mode}')

    return out
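
For context, here is a minimal, hypothetical usage sketch of the deleted helper; it is not part of this commit. It assumes a CHW float32 image and an HW boolean mask as NumPy arrays and exercises only the 'concat' and 'separate' modes, which need nothing beyond NumPy; the blur and crop modes additionally require the repository's evaluation_utils module. The import path reflects where the file lived before this deletion.

```python
# Hypothetical usage sketch (assumptions: CHW float32 image, HW boolean mask).
import numpy as np

# Path as it existed before this commit removed the file.
from clipseg.datasets.utils import blend_image_segmentation

img = np.random.rand(3, 224, 224).astype('float32')  # CHW image in [0, 1]
seg = np.random.rand(224, 224) > 0.5                  # HW binary mask

# 'concat' appends the mask as an extra channel -> a single (4, 224, 224) array.
(cond,) = blend_image_segmentation(img, seg, mode='concat')
print(cond.shape)                                     # (4, 224, 224)

# 'separate' keeps the image and the mask as two arrays.
img_out, seg_out = blend_image_segmentation(img, seg, mode='separate')
print(img_out.dtype, seg_out.dtype)                   # float32 int64
```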