From 63840020afbfe8c096315e1733d121ab79703889 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Fri, 11 Oct 2024 09:19:50 +0300
Subject: [PATCH] llama : fix llama_token_is_prefix

---
 src/llama-vocab.cpp | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/llama-vocab.cpp b/src/llama-vocab.cpp
index 367b31bac541d..070de936536e0 100644
--- a/src/llama-vocab.cpp
+++ b/src/llama-vocab.cpp
@@ -1865,14 +1865,14 @@ bool llama_token_is_prefix_impl(
     char text_buf_0[128];
     char text_buf_1[128];
 
-    const int32_t len0 = llama_token_to_piece_impl(vocab, token0, text_buf_0, 128, 0, false);
-    const int32_t len1 = llama_token_to_piece_impl(vocab, token1, text_buf_1, 128, 0, false);
+    const int32_t len0 = llama_token_to_piece_impl(vocab, token0, text_buf_0, sizeof(text_buf_0) - 1, 0, false);
+    const int32_t len1 = llama_token_to_piece_impl(vocab, token1, text_buf_1, sizeof(text_buf_1) - 1, 0, false);
 
     if (len0 <= 0 || len1 <= 0) {
         return false;
     }
 
-    return len0 < len1 && memcmp(text_buf_0, text_buf_1, len0) == 0;
+    return len0 <= len1 && memcmp(text_buf_0, text_buf_1, len0) == 0;
 }
 
 int32_t llama_detokenize_impl(
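
For context, a minimal standalone sketch of what the relaxed comparison means. The helper is_prefix below is hypothetical and only mirrors the fixed check from the hunk above; it is not llama.cpp API. The real function compares the detokenized pieces of two tokens via llama_token_to_piece_impl, and the patch makes an equal piece count as a prefix of itself while also capping the write length at one byte less than the buffer size.

    // Not llama.cpp code: a self-contained illustration of the comparison logic.
    #include <cassert>
    #include <cstdint>
    #include <cstring>

    // Mirrors the tail of llama_token_is_prefix_impl after the patch.
    static bool is_prefix(const char * p0, int32_t len0, const char * p1, int32_t len1) {
        if (len0 <= 0 || len1 <= 0) {
            return false;
        }
        // Before the patch this used `len0 < len1`, so a piece was never
        // considered a prefix of an identical piece.
        return len0 <= len1 && memcmp(p0, p1, len0) == 0;
    }

    int main() {
        assert( is_prefix("foo", 3, "foobar", 6)); // proper prefix
        assert( is_prefix("foo", 3, "foo",    3)); // equal pieces now count as a prefix
        assert(!is_prefix("bar", 3, "foobar", 6)); // not a prefix
        return 0;
    }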