
Commit acbeb74

Fix typo
1 parent c423e0b

4 files changed: +4 −5

configs/my_config.yaml

+1 −1
@@ -5,4 +5,4 @@ dataset:
 data_path: 'data'
 blip_v2_model_type: blip2-flan-t5-xxl # Change to blip2-flan-t5-xl for smaller GPUs
 blip_half_precision: True
-# Add more changes here, following the same format as base_config.yaml
+# Add more changes here, following the same format as base_config.yaml
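For context, my_config.yaml only lists overrides; everything else comes from base_config.yaml. A minimal sketch of that override pattern, using plain PyYAML and a hand-rolled merge rather than the repo's actual loader, and assuming data_path sits under the dataset: block as the hunk header suggests:

import yaml

def deep_merge(base: dict, override: dict) -> dict:
    # Recursively merge `override` into `base`; keys in the override win.
    merged = dict(base)
    for key, value in override.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = deep_merge(merged[key], value)
        else:
            merged[key] = value
    return merged

with open('configs/base_config.yaml') as f:
    base = yaml.safe_load(f)
with open('configs/my_config.yaml') as f:
    overrides = yaml.safe_load(f)

config = deep_merge(base, overrides)
print(config['dataset']['data_path'])  # 'data', taken from my_config.yaml (assumed key layout)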

data/queries.csv

+3 −2
@@ -1,2 +1,3 @@
-index,sample_id,possible_answers,query_type,info_to_prompt,query,answer,image_name,img,
-0,0,How many cookies are there?,seven,,How many cookies are there?,seven,cookies.png,cookies.png
+index,sample_id,possible_answers,query_type,query,answer,image_name
+0,0,purple,,What color do you get if you combine the colors of the viper and the flower?,purple,viper_flower.png
+0,0,,,Tell me about the competition between the two skyscrapers in the image.,,skyscrapers.png
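The new header drops the info_to_prompt and img columns, leaving index, sample_id, possible_answers, query_type, query, answer and image_name. A quick way to inspect the file, illustrative only and using pandas rather than the repo's own dataset class:

import pandas as pd

queries = pd.read_csv('data/queries.csv')
for _, row in queries.iterrows():
    print(f"{row['image_name']}: {row['query']}")
    # Open-ended rows such as the skyscraper query leave `answer` empty (NaN).
    if pd.notna(row['answer']):
        print(f"  expected answer: {row['answer']}")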

main_batch.py

-1
@@ -167,7 +167,6 @@ def main():
         # TODO compute Codex for next batch as current batch is being processed

         if not config.use_cached_codex:
-            # codes = codex(prompt=batch['info_to_qprompt'], base_prompt=base_prompt)
             codes = codex(prompt=batch['query'], base_prompt=base_prompt)

         else:
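With the commented-out prompt path removed, the uncached branch builds Codex prompts from the query column alone. A stand-in sketch of that call shape; the codex stub and the batch contents below are hypothetical, and only the keyword arguments match the line kept above:

def codex(prompt, base_prompt):
    # Stand-in stub: the real function sends each prompt to a code-generation
    # model and returns one generated program per query.
    return [f"{base_prompt}\n# Query: {p}" for p in prompt]

# Hypothetical batch built from the rows in data/queries.csv
batch = {'query': ['What color do you get if you combine the colors of the viper and the flower?',
                   'Tell me about the competition between the two skyscrapers in the image.']}
base_prompt = "def execute_command(image):"  # hypothetical prompt prefix
codes = codex(prompt=batch['query'], base_prompt=base_prompt)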

vision_models.py

-1
@@ -1219,7 +1219,6 @@ def __init__(self, gpu_number=0,
         }
         with warnings.catch_warnings(), HiddenPrints("XVLM"):
             model = XVLMBase(config_xvlm, use_contrastive_loss=True, vision_config=vision_config)
-
             checkpoint = torch.load(path_checkpoint, map_location='cpu')
             state_dict = checkpoint['model'] if 'model' in checkpoint.keys() else checkpoint
             msg = model.load_state_dict(state_dict, strict=False)
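The surviving lines follow the usual PyTorch checkpoint-loading pattern: load on CPU, unwrap an optional 'model' key, and apply the weights non-strictly. A self-contained sketch with a placeholder module instead of XVLMBase:

import torch
import torch.nn as nn

model = nn.Linear(4, 2)             # placeholder for XVLMBase
path_checkpoint = 'checkpoint.pth'  # hypothetical path

checkpoint = torch.load(path_checkpoint, map_location='cpu')
state_dict = checkpoint['model'] if 'model' in checkpoint else checkpoint
msg = model.load_state_dict(state_dict, strict=False)
# strict=False reports missing or unexpected keys instead of raising.
print(msg.missing_keys, msg.unexpected_keys)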
