
Commit

more cleanups
ochafik committed Jan 22, 2025
1 parent 28cac49 commit 2dd09c7
Showing 2 changed files with 8 additions and 19 deletions.
26 changes: 8 additions & 18 deletions common/sampling.cpp
@@ -151,22 +151,16 @@ struct common_sampler * common_sampler_init(const struct llama_model * model, co
 
     lparams.no_perf = params.no_perf;
 
-    std::vector<const char *> c_trigger_words;
-    c_trigger_words.reserve(params.grammar_trigger_words.size());
+    std::vector<const char *> trigger_words;
+    trigger_words.reserve(params.grammar_trigger_words.size());
     for (const auto & str : params.grammar_trigger_words) {
-        c_trigger_words.push_back(str.c_str());
+        trigger_words.push_back(str.c_str());
     }
     auto * result = new common_sampler {
         /* .params = */ params,
-        /* .grmr = */ llama_sampler_init_grammar(
-            vocab,
-            params.grammar.c_str(),
-            "root",
-            c_trigger_words.data(),
-            c_trigger_words.size(),
-            params.grammar_trigger_tokens.data(),
-            params.grammar_trigger_tokens.size()
-        ),
+        /* .grmr = */ llama_sampler_init_grammar(vocab, params.grammar.c_str(), "root",
+            trigger_words.data(), trigger_words.size(),
+            params.grammar_trigger_tokens.data(), params.grammar_trigger_tokens.size()),
         /* .chain = */ llama_sampler_chain_init(lparams),
         /* .prev = */ ring_buffer<llama_token>(std::max(32, params.n_prev)),
         /* .cur = */ {},
@@ -237,9 +231,7 @@ struct common_sampler * common_sampler_init(const struct llama_model * model, co
 
 void common_sampler_free(struct common_sampler * gsmpl) {
     if (gsmpl) {
-        if (gsmpl->grmr) {
-            llama_sampler_free(gsmpl->grmr);
-        }
+        llama_sampler_free(gsmpl->grmr);
 
         llama_sampler_free(gsmpl->chain);
 
@@ -258,9 +250,7 @@ void common_sampler_accept(struct common_sampler * gsmpl, llama_token token, boo
 }
 
 void common_sampler_reset(struct common_sampler * gsmpl) {
-    if (gsmpl->grmr) {
-        llama_sampler_reset(gsmpl->grmr);
-    }
+    llama_sampler_reset(gsmpl->grmr);
 
     llama_sampler_reset(gsmpl->chain);
 }
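
The substance of the first hunk above is the marshalling of std::vector<std::string> trigger words into the (const char **, size_t) pair passed to llama_sampler_init_grammar; the rest is a rename from c_trigger_words to trigger_words and a reflow of the call. Below is a minimal standalone C++ sketch of that pattern; consume_trigger_words and the example strings are hypothetical stand-ins, and only the vector-building loop mirrors the diff.

// Standalone sketch (not llama.cpp code) of the std::string -> const char *
// marshalling shown in the hunk above.
#include <cstdio>
#include <string>
#include <vector>

// Hypothetical stand-in for the C-style consumer; in the diff the real call is
// llama_sampler_init_grammar(vocab, grammar, "root", words, n_words, tokens, n_tokens).
static void consume_trigger_words(const char ** words, size_t n_words) {
    for (size_t i = 0; i < n_words; ++i) {
        std::printf("trigger word %zu: %s\n", i, words[i]);
    }
}

int main() {
    // Owning storage, playing the role of params.grammar_trigger_words.
    std::vector<std::string> grammar_trigger_words = { "<tool_call>", "```json" };

    // Non-owning views: valid only while grammar_trigger_words is alive, which is
    // why the diff builds trigger_words immediately before the init call.
    std::vector<const char *> trigger_words;
    trigger_words.reserve(grammar_trigger_words.size());
    for (const auto & str : grammar_trigger_words) {
        trigger_words.push_back(str.c_str());
    }

    consume_trigger_words(trigger_words.data(), trigger_words.size());
    return 0;
}
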
1 change: 0 additions & 1 deletion src/llama-sampling.cpp
@@ -1504,7 +1504,6 @@ struct llama_sampler * llama_sampler_init_grammar(
         size_t num_trigger_words,
         const llama_token * trigger_tokens,
         size_t num_trigger_tokens) {
-    // struct llama_sampler * llama_sampler_init_grammar(const struct llama_vocab * vocab, const char * grammar_str, const char * grammar_root) {
     auto * ctx = new llama_sampler_grammar;
 
     if (grammar_str != nullptr && grammar_str[0] != '\0') {
