Commit d03aa09 · Update app.py
Parent(s): 2b9672f
app.py CHANGED

@@ -222,7 +222,7 @@ block_css = """
 def build_demo(embed_mode):
 
     textbox = gr.Textbox(show_label=False, placeholder="Enter text and press ENTER", container=False)
-    with gr.Blocks(title="
+    with gr.Blocks(title="VCoder", theme=gr.themes.Default(), css=block_css) as demo:
         state = gr.State()
 
         if not embed_mode:
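Note on the first hunk: the rewritten gr.Blocks call sets the browser-tab title to "VCoder", applies the stock Gradio theme, and injects the module-level block_css string, binding the resulting app to demo. A minimal, self-contained sketch of that pattern (the CSS string and page body below are placeholders, not the Space's real ones):

import gradio as gr

# Placeholder CSS; in app.py the real block_css string is defined further up the file.
block_css = "#chatbot { min-height: 400px; }"

with gr.Blocks(title="VCoder", theme=gr.themes.Default(), css=block_css) as demo:
    gr.Markdown("VCoder demo")  # placeholder body; the real app builds its full UI here

if __name__ == "__main__":
    demo.launch()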
@@ -246,17 +246,18 @@ def build_demo(embed_mode):
                     value="Default",
                     label="Preprocess for non-square image", visible=False)
 
-
-
-
-
-
-
-
-
-
-
-
+                with gr.Row():
+                    segbox = gr.Image(type="pil", label="Seg Map")
+                    seg_process_mode = gr.Radio(
+                        ["Crop", "Resize", "Pad", "Default"],
+                        value="Default",
+                        label="Preprocess for non-square Seg Map", visible=False)
+
+                    depthbox = gr.Image(type="pil", label="Depth Map")
+                    depth_process_mode = gr.Radio(
+                        ["Crop", "Resize", "Pad", "Default"],
+                        value="Default",
+                        label="Preprocess for non-square Depth Map", visible=False)
 
                 with gr.Accordion("Parameters", open=False) as parameter_row:
                     temperature = gr.Slider(minimum=0.0, maximum=1.0, value=0.8, step=0.1, interactive=True, label="Temperature",)
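The second hunk places the two auxiliary inputs in a gr.Row so the segmentation-map and depth-map uploads sit side by side, each paired with a hidden preprocessing Radio that mirrors the existing "Preprocess for non-square image" control. A standalone sketch of that layout, with a hypothetical respond() callback standing in for the app's real inference handler:

import gradio as gr

def respond(image, seg_map, depth_map, prompt):
    # Hypothetical stand-in for the app's real inference call.
    provided = [name for name, x in [("image", image), ("seg map", seg_map), ("depth map", depth_map)] if x is not None]
    return f"Prompt: {prompt!r} | inputs provided: {', '.join(provided) or 'none'}"

with gr.Blocks(title="VCoder") as demo:
    imagebox = gr.Image(type="pil", label="Image")
    with gr.Row():
        segbox = gr.Image(type="pil", label="Seg Map")
        # Hidden preprocessing selectors, mirroring the diff; not wired to the callback in this sketch.
        seg_process_mode = gr.Radio(
            ["Crop", "Resize", "Pad", "Default"],
            value="Default",
            label="Preprocess for non-square Seg Map", visible=False)

        depthbox = gr.Image(type="pil", label="Depth Map")
        depth_process_mode = gr.Radio(
            ["Crop", "Resize", "Pad", "Default"],
            value="Default",
            label="Preprocess for non-square Depth Map", visible=False)

    textbox = gr.Textbox(show_label=False, placeholder="Enter text and press ENTER", container=False)
    output = gr.Textbox(label="Response")
    textbox.submit(respond, [imagebox, segbox, depthbox, textbox], output)

if __name__ == "__main__":
    demo.launch()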

@@ -282,8 +283,8 @@ def build_demo(embed_mode):
                 gr.Examples(examples=[
                     [f"{cur_dir}/examples/people.jpg", f"{cur_dir}/examples/people_pan.png", None, "What objects can be seen in the image?", "0.9", "1.0"],
                     [f"{cur_dir}/examples/corgi.jpg", f"{cur_dir}/examples/corgi_pan.png", None, "What objects can be seen in the image?", "0.6", "0.7"],
-                    [f"{cur_dir}/examples/suits.jpg", f"{cur_dir}/examples/suits_pan.png", f"{cur_dir}/examples/suits_depth.jpeg", "Can you describe the depth order of the objects in this image, from closest to farthest?", "0.
-                    [f"{cur_dir}/examples/depth.jpeg", f"{cur_dir}/examples/depth_pan.png", f"{cur_dir}/examples/depth_depth.png", "Can you describe the depth order of the objects in this image, from closest to farthest?", "0.
+                    [f"{cur_dir}/examples/suits.jpg", f"{cur_dir}/examples/suits_pan.png", f"{cur_dir}/examples/suits_depth.jpeg", "Can you describe the depth order of the objects in this image, from closest to farthest?", "0.2", "0.5"],
+                    [f"{cur_dir}/examples/depth.jpeg", f"{cur_dir}/examples/depth_pan.png", f"{cur_dir}/examples/depth_depth.png", "Can you describe the depth order of the objects in this image, from closest to farthest?", "0.2", "0.5"],
                     [f"{cur_dir}/examples/friends.jpg", f"{cur_dir}/examples/friends_pan.png", None, "What is happening in the image?", "0.8", "0.9"],
                     [f"{cur_dir}/examples/suits.jpg", f"{cur_dir}/examples/suits_pan.png", None, "What objects can be seen in the image?", "0.5", "0.5"],
                 ], inputs=[imagebox, segbox, depthbox, textbox, temperature, top_p])
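The third hunk supplies sampling values ("0.2", "0.5") for the two depth-order examples and points them at depth-map files. In gr.Examples, clicking a row populates each component listed under inputs in order: image, seg map, depth map, prompt, temperature, top_p. A small sketch of that wiring with placeholder file paths (they must exist on disk for the thumbnails to load):

import gradio as gr

with gr.Blocks() as demo:
    imagebox = gr.Image(type="pil", label="Image")
    segbox = gr.Image(type="pil", label="Seg Map")
    depthbox = gr.Image(type="pil", label="Depth Map")
    textbox = gr.Textbox(show_label=False, placeholder="Enter text and press ENTER", container=False)
    temperature = gr.Slider(minimum=0.0, maximum=1.0, value=0.8, step=0.1, label="Temperature")
    top_p = gr.Slider(minimum=0.0, maximum=1.0, value=0.9, step=0.1, label="Top P")

    # Clicking a row fills the inputs below in order; paths are placeholders for this sketch.
    gr.Examples(examples=[
        ["examples/people.jpg", "examples/people_pan.png", None, "What objects can be seen in the image?", 0.9, 1.0],
        ["examples/depth.jpeg", "examples/depth_pan.png", "examples/depth_depth.png",
         "Can you describe the depth order of the objects in this image, from closest to farthest?", 0.2, 0.5],
    ], inputs=[imagebox, segbox, depthbox, textbox, temperature, top_p])

if __name__ == "__main__":
    demo.launch()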