nick_93 committed • Commit ee0bcfb • Parent(s): 87b4a1a
init

app.py CHANGED
@@ -64,10 +64,10 @@ def get_segmentation_pipeline(
         Tuple[AutoImageProcessor, UperNetForSemanticSegmentation]: segmentation pipeline
     """
     image_processor = AutoImageProcessor.from_pretrained(
-        "
+        "openmmlab/upernet-convnext-small"
     )
     image_segmentor = UperNetForSemanticSegmentation.from_pretrained(
-        "
+        "openmmlab/upernet-convnext-small"
     )
     return image_processor, image_segmentor
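For reference, a minimal sketch of how the segmentation pipeline returned above can be exercised, assuming the usual transformers and PIL imports already present in app.py; the input image and the post-processing step are illustrative, not part of this commit:

import torch
from PIL import Image
from transformers import AutoImageProcessor, UperNetForSemanticSegmentation

# Load the processor and UperNet segmentor now pinned to the Hub checkpoint.
image_processor = AutoImageProcessor.from_pretrained("openmmlab/upernet-convnext-small")
image_segmentor = UperNetForSemanticSegmentation.from_pretrained("openmmlab/upernet-convnext-small")

image = Image.open("room.jpg").convert("RGB")  # hypothetical input image
inputs = image_processor(images=image, return_tensors="pt")
with torch.no_grad():
    outputs = image_segmentor(**inputs)
# Resize the logits back to the input resolution and take the per-pixel class map.
seg_map = image_processor.post_process_semantic_segmentation(
    outputs, target_sizes=[image.size[::-1]])[0]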
@@ -109,9 +109,9 @@ def segment_image(


 def get_depth_pipeline():
-    feature_extractor = AutoImageProcessor.from_pretrained("
+    feature_extractor = AutoImageProcessor.from_pretrained("LiheYoung/depth-anything-large-hf",
                                                            torch_dtype=dtype)
-    depth_estimator = AutoModelForDepthEstimation.from_pretrained("
+    depth_estimator = AutoModelForDepthEstimation.from_pretrained("LiheYoung/depth-anything-large-hf",
                                                                   torch_dtype=dtype)
     return feature_extractor, depth_estimator
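Similarly, a hedged sketch of the new Depth Anything path; dtype is defined elsewhere in app.py, so torch.float16 is assumed here, and the upsampling step is illustrative:

import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForDepthEstimation

dtype = torch.float16  # assumption: app.py defines dtype globally

feature_extractor = AutoImageProcessor.from_pretrained("LiheYoung/depth-anything-large-hf",
                                                       torch_dtype=dtype)
depth_estimator = AutoModelForDepthEstimation.from_pretrained("LiheYoung/depth-anything-large-hf",
                                                              torch_dtype=dtype)

image = Image.open("room.jpg").convert("RGB")  # hypothetical input image
inputs = feature_extractor(images=image, return_tensors="pt")
with torch.no_grad():
    # Cast pixel values to the model dtype before the forward pass.
    outputs = depth_estimator(**{k: v.to(dtype) for k, v in inputs.items()})
# Upsample the raw depth prediction back to the original image resolution.
depth_map = torch.nn.functional.interpolate(
    outputs.predicted_depth.unsqueeze(1), size=image.size[::-1],
    mode="bicubic", align_corners=False).squeeze()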
@@ -174,9 +174,9 @@ class ControlNetDepthDesignModelMulti:

         #os.environ['HF_HUB_OFFLINE'] = "True"
         controlnet_depth= ControlNetModel.from_pretrained(
-            "
+            "controlnet_depth", torch_dtype=dtype, use_safetensors=True)
         controlnet_seg = ControlNetModel.from_pretrained(
-            "
+            "own_controlnet", torch_dtype=dtype, use_safetensors=True)

         self.pipe = StableDiffusionControlNetInpaintPipeline.from_pretrained(
             "SG161222/Realistic_Vision_V5.1_noVAE",
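Putting this hunk together, a sketch of how the two ControlNets likely feed the inpainting pipeline; "controlnet_depth" and "own_controlnet" are assumed to be local checkpoint folders inside the Space, and any from_pretrained arguments beyond what the diff shows are unknown:

import torch
from diffusers import ControlNetModel, StableDiffusionControlNetInpaintPipeline

dtype = torch.float16  # assumption: app.py defines dtype globally

# Both ControlNets appear to be loaded from folders checked into the Space
# rather than from the Hub.
controlnet_depth = ControlNetModel.from_pretrained(
    "controlnet_depth", torch_dtype=dtype, use_safetensors=True)
controlnet_seg = ControlNetModel.from_pretrained(
    "own_controlnet", torch_dtype=dtype, use_safetensors=True)

# Passing a list of ControlNets yields a MultiControlNet setup, so the
# inpainting pipeline can be conditioned on depth and segmentation at once.
pipe = StableDiffusionControlNetInpaintPipeline.from_pretrained(
    "SG161222/Realistic_Vision_V5.1_noVAE",
    controlnet=[controlnet_depth, controlnet_seg],
    torch_dtype=dtype)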