Mirror of https://github.com/salesforce/BLIP.git (synced 2026-01-26 15:19:44 +00:00)
update model url
@@ -3,7 +3,7 @@ ann_root: 'annotation'
 coco_gt_root: 'annotation/coco_gt'

 # set pretrained as a file path or an url
-pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model*_base_caption.pth'
+pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_caption_capfilt_large.pth'

 # size of vit model; base or large
 vit: 'base'
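The change above swaps the captioning config's `pretrained` entry from the old model*_base_caption.pth checkpoint to model_base_caption_capfilt_large.pth, so whatever script reads this YAML will now download the CapFilt-L weights instead. As a minimal, hypothetical sketch of how that entry is consumed (the file name configs/caption_coco.yaml and the exact wiring are assumptions; blip_decoder itself appears in the demo hunk further down):

# Hypothetical sketch: read the edited caption config and build the model from it.
# Assumes PyYAML and that this hunk comes from configs/caption_coco.yaml.
import yaml
from models.blip import blip_decoder

with open('configs/caption_coco.yaml') as f:
    config = yaml.safe_load(f)

# 'pretrained' now points at model_base_caption_capfilt_large.pth; when given a
# URL, blip_decoder fetches the checkpoint and loads the weights.
model = blip_decoder(pretrained=config['pretrained'],
                     image_size=config.get('image_size', 384),  # 384 assumed, as in the demo
                     vit=config['vit'])
model.eval()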
@@ -2,7 +2,7 @@ image_root: '/export/share/datasets/vision/nocaps/'
 ann_root: 'annotation'

 # set pretrained as a file path or an url
-pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model*_base_caption.pth'
+pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_caption_capfilt_large.pth'

 vit: 'base'
 batch_size: 32
@@ -4,7 +4,7 @@ train_files: ['vqa_train','vqa_val','vg_qa']
 ann_root: 'annotation'

 # set pretrained as a file path or an url
-pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model*_vqa.pth'
+pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_vqa_capfilt_large.pth'

 # size of vit model; base or large
 vit: 'base'
@@ -99,7 +99,7 @@
 "image_size = 384\n",
 "image = load_demo_image(image_size=image_size, device=device)\n",
 "\n",
-"model_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model*_base_caption.pth'\n",
+"model_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_capfilt_large.pth'\n",
 " \n",
 "model = blip_decoder(pretrained=model_url, image_size=image_size, vit='base')\n",
 "model.eval()\n",
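In the demo notebook, this cell loads the captioning model straight from the updated URL and then decodes a caption for the demo image. A hedged sketch of the step that follows in that notebook (the generate() call and its beam-search settings are assumed from the BLIP demo, not shown in this hunk):

# Illustrative continuation of the captioning cell above.
# Assumes model/image are the objects created in that cell and that
# BLIP's decoder exposes generate(); the decoding parameters are examples.
import torch

with torch.no_grad():
    captions = model.generate(image, sample=False, num_beams=3,
                              max_length=20, min_length=5)
print('caption:', captions[0])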
@@ -153,7 +153,7 @@
 "image_size = 480\n",
 "image = load_demo_image(image_size=image_size, device=device) \n",
 "\n",
-"model_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model*_vqa.pth'\n",
+"model_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_vqa_capfilt_large.pth'\n",
 " \n",
 "model = blip_vqa(pretrained=model_url, image_size=image_size, vit='base')\n",
 "model.eval()\n",
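The VQA cell mirrors the captioning one: blip_vqa is built from the new model_base_vqa_capfilt_large.pth URL at a 480px input size. A hedged sketch of how the loaded model answers a question in the demo (the question string and the inference='generate' call are assumptions taken from the BLIP demo, not part of this hunk):

# Illustrative continuation of the VQA cell above.
# Assumes model/image come from that cell; the question is only an example.
import torch

question = 'where is the woman sitting?'
with torch.no_grad():
    answers = model(image, question, train=False, inference='generate')
print('answer:', answers[0])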