forked from ggerganov/llama.cpp
[21-dec-24]
- use prompt if passed in chat completions, instead of messages
- added python integration tests for the server changes
- added add_special as a request param passed through to upstream, which lets pre-formatted chat messages skip being formatted again (see the sketch below)
- modified the workflow to download and cache the llama2-7b model used for integration testing

[17-jan-25]
- updated to the latest llama.cpp; server.cpp now uses ctx_server.vocab instead of the earlier ctx_server.ctx

[26-jan-25]
- merged examples/server/utils.hpp; llama.cpp is starting to support tools in the chat template, so the rebase conflict was resolved by merging our pre-formatted-messages logic with the new feature
- added the github workflow to create artifacts per platform and gpu architecture: macos cpu+metal, ubuntu cpu+cuda, windows cpu+cuda 12.4+11.6, plus the other ubuntu/windows cpu variants and gpu archs such as vulkan
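To illustrate the add_special change, here is a minimal client-side sketch. The host/port and the use of the requests library are illustrative assumptions, not part of the commit; the endpoint, model, prompt, and add_special fields mirror the integration test shown below.

import requests

BASE_URL = "http://localhost:8080"  # assumed host/port, not from the commit

# standard request: the server applies its chat template to `messages`
res = requests.post(
    f"{BASE_URL}/chat/completions",
    json={
        "model": "llama2",
        "max_tokens": 16,
        "messages": [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "What day comes after Monday?"},
        ],
    },
)
print(res.json()["choices"][0]["message"]["content"])

# pre-formatted request: `prompt` is sent instead of `messages`, and
# add_special=False keeps the server from adding special tokens (e.g. BOS)
# to text that already contains them
res = requests.post(
    f"{BASE_URL}/chat/completions",
    json={
        "model": "llama2",
        "max_tokens": 1024,
        "prompt": "<s>[INST] <<SYS>>\nYou are a helpful assistant.\n<</SYS>>\nWhat day comes after Monday? [/INST]",
        "add_special": False,
    },
)
print(res.json()["choices"][0]["message"]["content"])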
Showing 7 changed files with 879 additions and 1 deletion.
@@ -0,0 +1,84 @@
import pytest
from utils import *

server = ServerPreset.llama2()


@pytest.fixture(scope="module", autouse=True)
def create_server():
    global server
    server = ServerPreset.llama2()


@pytest.mark.parametrize(
    "model,data,max_tokens,re_content,n_prompt,n_predicted,finish_reason,prompt",
    [
        # chat messages are passed: the server formats them with its chat template
        (
            "llama2",
            {
                "messages": [
                    {"role": "system", "content": "You are a helpful assistant."},
                    {"role": "user", "content": "What day comes after Monday?"},
                ]
            },
            16,
            "(Tuesday)+",
            56,
            8,
            "stop",
            """<s> <|im_start|>system
You are a helpful assistant.<|im_end|>
<|im_start|>user
What day comes after Monday?<|im_end|>
<|im_start|>assistant
""",
        ),
        # a pre-formatted prompt is passed with add_special=False: the server
        # uses it verbatim instead of applying the chat template again
        (
            "llama2",
            {
                "prompt": """<s>[INST] <<SYS>>
You are a helpful assistant.
<</SYS>>
What day comes after Monday? [/INST]""",
                "add_special": False,
            },
            1024,
            "(Tuesday)+",
            33,
            25,
            "stop",
            """<s> [INST] <<SYS>>
You are a helpful assistant.
<</SYS>>
What day comes after Monday? [/INST]""",
        ),
    ],
)
def test_chat_completion_without_preformatted_prompt(
    model, data, max_tokens, re_content, n_prompt, n_predicted, finish_reason, prompt
):
    global server
    server.start()
    res = server.make_request(
        "POST",
        "/chat/completions",
        data={
            "model": model,
            "max_tokens": max_tokens,
            **data,
        },
    )
    assert res.status_code == 200
    # make sure the completion id has the expected format
    assert "cmpl" in res.body["id"]
    assert res.body["model"] == model
    # assert res.body["usage"]["prompt_tokens"] == n_prompt
    # assert res.body["usage"]["completion_tokens"] == n_predicted
    choice = res.body["choices"][0]
    assert choice["message"]["role"] == "assistant"
    assert match_regex(re_content, choice["message"]["content"])
    assert choice["finish_reason"] == finish_reason
    # the server echoes back the exact prompt it ran in verbose mode
    assert res.body["__verbose"]["prompt"] == prompt
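ServerPreset and match_regex come from the local utils module and are not part of this diff. For readability, a minimal sketch of what such a match_regex helper could look like, assuming it simply reports whether the pattern matches anywhere in the generated text (the repo's actual helper may differ):

import re

def match_regex(regex: str, text: str) -> bool:
    # True when the pattern matches anywhere in the text, across newlines
    return re.search(regex, text, re.DOTALL) is not None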
@@ -0,0 +1,16 @@
#!/usr/bin/env bash
set -x

# sync the fork's master with upstream (remote "gg") and force-push
git checkout master
git fetch gg
git pull --rebase
git rebase gg/master
git push --force origin master

# rebase the feature branch onto the updated master under a new branch name
git checkout bodhiserver_lastcommit
git pull --rebase
git checkout -b bodhiserver_newcommit
git rebase origin/master

# run the server integration tests, then publish the rebased branch
cd examples/server/test && pip install -r requirements.txt && pytest
git push -u origin bodhiserver_newcommit

git checkout master