implemented assertion for all model settings
awkrail committed Sep 18, 2024
1 parent c19b025 · commit 7bda000
Showing 1 changed file with 4 additions and 7 deletions.
tests/test_models.py (4 additions, 7 deletions)

@@ -81,10 +81,7 @@ def test_model_prediction():

     query = 'A woman wearing a glass is speaking in front of the camera'
     prediction = model.predict(query)
-    try:
-        assert len(prediction['pred_relevant_windows']) == MOMENT_NUM, \
-            f'The number of moments from {feature}_{model_name}_{dataset} is expected {MOMENT_NUM}, but got {len(prediction["pred_relevant_windows"])}.'
-        assert len(prediction['pred_saliency_scores']) == math.ceil(second / model._clip_len), \
-            f'The number of saliency scores from {feature}_{model_name}_{dataset} is expected {math.ceil(second / model._clip_len)}, but got {len(prediction["pred_saliency_scores"])}.'
-    except:
-        import ipdb; ipdb.set_trace()
+    assert len(prediction['pred_relevant_windows']) == MOMENT_NUM, \
+        f'The number of moments from {feature}_{model_name}_{dataset} is expected {MOMENT_NUM}, but got {len(prediction["pred_relevant_windows"])}.'
+    assert len(prediction['pred_saliency_scores']) == math.ceil(second / model._clip_len), \
+        f'The number of saliency scores from {feature}_{model_name}_{dataset} is expected {math.ceil(second / model._clip_len)}, but got {len(prediction["pred_saliency_scores"])}.'
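The change removes the bare except that caught the failing assertions and dropped into an interactive ipdb session, which would block any non-interactive run of the test suite; the assertions now fail immediately with their descriptive messages. Below is a minimal sketch of how such assertions could run across every model setting, per the commit title. MOMENT_NUM, predict, pred_relevant_windows, pred_saliency_scores, and _clip_len appear in the diff above; the SETTINGS list, the load_model helper, and the second value are hypothetical stand-ins for illustration.

import math

MOMENT_NUM = 10  # assumed value; the real constant is defined in tests/test_models.py
SETTINGS = [     # hypothetical (feature, model_name, dataset) combinations
    ('clip', 'moment_detr', 'qvhighlights'),
    ('clip_slowfast', 'qd_detr', 'qvhighlights'),
]

def check_prediction(model, feature, model_name, dataset, second):
    query = 'A woman wearing a glass is speaking in front of the camera'
    prediction = model.predict(query)
    # Plain asserts fail loudly with a descriptive message; the removed bare
    # `except` swallowed the AssertionError and opened ipdb instead.
    assert len(prediction['pred_relevant_windows']) == MOMENT_NUM, \
        f'{feature}_{model_name}_{dataset}: expected {MOMENT_NUM} moments, ' \
        f'got {len(prediction["pred_relevant_windows"])}.'
    expected = math.ceil(second / model._clip_len)
    assert len(prediction['pred_saliency_scores']) == expected, \
        f'{feature}_{model_name}_{dataset}: expected {expected} saliency scores, ' \
        f'got {len(prediction["pred_saliency_scores"])}.'

for feature, model_name, dataset in SETTINGS:
    model = load_model(feature, model_name, dataset)  # hypothetical loader
    check_prediction(model, feature, model_name, dataset, second=150)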
