Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -90,7 +90,7 @@ def main(_):
90
  image_list = gr.State([])
91
  gr.Markdown('# LVM Demo')
92
  gr.Markdown('This is the demo of CVPR 2024 paper: Sequential Modeling Enables Scalable Learning for Large Vision Models. For more information about this paper please check the [website](https://yutongbai.com/lvm.html).')
93
- gr.Markdown(f'Serving model: {FLAGS.checkpoint}')
94
 
95
  gr.Markdown('**There are mainly two visual prompting methods: sequential prompting and analogy prompting.**')
96
  gr.Markdown('**For analogy prompting: describe the task with few-shot examples, which are pairs of (x, y) inputs where x is the input image and y the "annotated" image. Then add one query image at the end. Download the few-shot examples dataset at [this link](https://livejohnshopkins-my.sharepoint.com/:f:/g/personal/ybai20_jh_edu/Ei0xiLdFFqJPnwAlFWar29EBUAvB0O3CVaJykZl-f11KDQ?e=Bx9SXZ), and you can simply change the query image at the end for testing.**')
 
90
  image_list = gr.State([])
91
  gr.Markdown('# LVM Demo')
92
  gr.Markdown('This is the demo of CVPR 2024 paper: Sequential Modeling Enables Scalable Learning for Large Vision Models. For more information about this paper please check the [website](https://yutongbai.com/lvm.html).')
93
+ gr.Markdown(f'Serving model: [{FLAGS.checkpoint}](https://huggingface.co/{FLAGS.checkpoint})')
94
 
95
  gr.Markdown('**There are mainly two visual prompting methods: sequential prompting and analogy prompting.**')
96
  gr.Markdown('**For analogy prompting: describe the task with few-shot examples, which are pairs of (x, y) inputs where x is the input image and y the "annotated" image. Then add one query image at the end. Download the few-shot examples dataset at [this link](https://livejohnshopkins-my.sharepoint.com/:f:/g/personal/ybai20_jh_edu/Ei0xiLdFFqJPnwAlFWar29EBUAvB0O3CVaJykZl-f11KDQ?e=Bx9SXZ), and you can simply change the query image at the end for testing.**')