Skip to content

Commit

Permalink
WWB: simplify code around start_chat / use_template (#1650)
Browse files Browse the repository at this point in the history
See #1533

Co-authored-by: Alexander Kozlov <kozzzloff@list.ru>
  • Loading branch information
ilya-lavrenov and AlexKoff88 authored Jan 30, 2025
1 parent 97bb83a commit debf2c6
Showing 1 changed file with 2 additions and 16 deletions.
18 changes: 2 additions & 16 deletions tools/who_what_benchmark/whowhatbench/wwb.py
Original file line number Diff line number Diff line change
Expand Up @@ -263,13 +263,7 @@ def diff_strings(a: str, b: str, *, use_loguru_colors: bool = False) -> str:


def genai_gen_text(model, tokenizer, question, max_new_tokens, skip_question, use_chat_template=False):
    """Generate a text answer for *question* with an openvino_genai pipeline.

    Args:
        model: openvino_genai LLMPipeline-like object exposing ``generate``.
        tokenizer: unused here; kept for signature parity with the other
            ``*_gen_text`` backends in this module.
        question: prompt string passed straight to ``model.generate``.
        max_new_tokens: generation length cap.
        skip_question: unused here; kept for backend signature parity.
        use_chat_template: forwarded as ``apply_chat_template`` so the
            pipeline itself handles chat formatting — this replaces the old
            manual start_chat()/finish_chat() bracketing, which was
            equivalent but more code (see commit "simplify code around
            start_chat / use_template").

    Returns:
        Whatever ``model.generate`` returns (the decoded generation result).
    """
    # Greedy decoding (do_sample=False) keeps the benchmark deterministic.
    return model.generate(question, do_sample=False, max_new_tokens=max_new_tokens, apply_chat_template=use_chat_template)


def llamacpp_gen_text(model, tokenizer, question, max_new_tokens, skip_question, use_chat_template=False):
Expand Down Expand Up @@ -335,15 +329,7 @@ def genai_gen_inpainting(model, prompt, image, mask, num_inference_steps, genera

def genai_gen_visual_text(model, prompt, image, processor, tokenizer, max_new_tokens, crop_question):
    """Generate a text answer about *image* with an openvino_genai VLM pipeline.

    Args:
        model: openvino_genai VLMPipeline-like object exposing ``generate``.
        prompt: text prompt for the visual-language model.
        image: PIL.Image-like object; converted to an ``ov.Tensor`` of shape
            (1, H, W, 3) uint8 — note ``image.size`` is (W, H), hence the
            reshape order below.
        processor: unused here; kept for signature parity with the other
            visual-text backends in this module.
        tokenizer: unused here; kept for backend signature parity.
        max_new_tokens: generation length cap.
        crop_question: unused here; kept for backend signature parity.

    Returns:
        str: the first decoded text from the pipeline's generation result.
    """
    image_data = ov.Tensor(np.array(image.getdata()).reshape(1, image.size[1], image.size[0], 3).astype(np.uint8))
    # Generation options are passed inline instead of mutating the pipeline's
    # stored GenerationConfig, and the start_chat()/finish_chat() bracketing
    # was dropped — equivalent behavior with less state (see commit
    # "simplify code around start_chat / use_template").
    out = model.generate(prompt, image=image_data, do_sample=False, max_new_tokens=max_new_tokens)
    return out.texts[0]


Expand Down

0 comments on commit debf2c6

Please sign in to comment.