Commit
chore(script): migrate towards repo implementations
mattgauf committed Aug 27, 2023
1 parent 6a29083 commit 855b808
Showing 8 changed files with 44 additions and 797 deletions.
661 changes: 0 additions & 661 deletions .github/workflows/build.yml

This file was deleted.

65 changes: 0 additions & 65 deletions .github/workflows/docker.yml

This file was deleted.

17 changes: 0 additions & 17 deletions .github/workflows/editorconfig.yml

This file was deleted.

20 changes: 0 additions & 20 deletions .github/workflows/tidy-post.yml

This file was deleted.

23 changes: 0 additions & 23 deletions .github/workflows/tidy-review.yml

This file was deleted.

12 changes: 6 additions & 6 deletions examples/alpaca.sh
@@ -7,13 +7,13 @@
 cd `dirname $0`
 cd ..
 
-./main -f ./prompts/alpaca.txt \
--n 1 \
--t 7 \
--b 256 \
--ins \
+./main -m ./models/alpaca.13b.ggmlv3.q8_0.bin \
 --color \
+-f ./prompts/alpaca.txt \
 --ctx_size 2048 \
+-n -1 \
+-ins -b 256 \
 --top_k 10000 \
 --temp 0.2 \
--repeat_penalty 1.1
+--repeat_penalty 1.1 \
+-t 7
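Aside from regrouping the flags, the functional change is -n 1 becoming -n -1: in llama.cpp-style main binaries, a negative -n / --n-predict value removes the fixed token cap, so generation runs until the model stops on its own or the context fills. With the model path now baked into the script, a plain invocation should suffice (assuming the quantized model file named in the diff exists under ./models):

    ./examples/alpaca.sh    # the script cd's to the repository root itself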
2 changes: 1 addition & 1 deletion examples/chat-13B.sh
@@ -4,7 +4,7 @@ set -e

cd "$(dirname "$0")/.." || exit

MODEL="${MODEL:-../models/13B/ggml-model-q4_0.bin}"
MODEL="${MODEL:-./models/13B/ggml-model-q8_0.gguf}"
PROMPT_TEMPLATE=${PROMPT_TEMPLATE:-./prompts/chat.txt}
USER_NAME="${USER_NAME:-USER}"
AI_NAME="${AI_NAME:-LLaMa}"
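Because MODEL is read with Bash's ${VAR:-default} expansion, the new q8_0 .gguf path is only a fallback: a different model can be supplied per run without editing the script, and the same applies to PROMPT_TEMPLATE, USER_NAME, and AI_NAME below it. A sketch, with an illustrative model path:

    MODEL=./models/13B/ggml-model-q4_K_M.gguf ./examples/chat-13B.sh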
41 changes: 37 additions & 4 deletions llama.sh
@@ -1,7 +1,40 @@
 #!/bin/bash
-#
-# Temporary script - will be removed in the future
-#
 
-./main -m ../models/13B/ggml-model-q4_0.bin -n 256 --repeat_penalty 1.0 --color -i -r "User:" -f prompts/chat.txt
+set -e
 
+MODEL="${MODEL:-./models/13B/ggml-model-q8_0.gguf}"
+PROMPT_TEMPLATE=${PROMPT_TEMPLATE:-./prompts/chat.txt}
+USER_NAME="${USER_NAME:-Matt}"
+AI_NAME="${AI_NAME:-LLaMa}"
+
+# Adjust to the number of CPU cores you want to use.
+N_THREAD="${N_THREAD:-8}"
+
+# Number of tokens to predict (made it larger than default because we want a long interaction)
+N_PREDICTS="${N_PREDICTS:-2048}"
+
+# Note: you can also override the generation options by specifying them on the command line:
+# For example, override the context size by doing: ./chatLLaMa --ctx_size 1024
+GEN_OPTIONS="${GEN_OPTIONS:---ctx_size 2048 --temp 0.7 --top_k 40 --top_p 0.5 --repeat_last_n 256 --batch_size 1024 --repeat_penalty 1.17647}"
+
+DATE_TIME=$(date +%H:%M)
+DATE_YEAR=$(date +%Y)
+
+PROMPT_FILE=$(mktemp -t llamacpp_prompt.XXXXXXX.txt)
+
+sed -e "s/\[\[USER_NAME\]\]/$USER_NAME/g" \
+    -e "s/\[\[AI_NAME\]\]/$AI_NAME/g" \
+    -e "s/\[\[DATE_TIME\]\]/$DATE_TIME/g" \
+    -e "s/\[\[DATE_YEAR\]\]/$DATE_YEAR/g" \
+    $PROMPT_TEMPLATE > $PROMPT_FILE
+
+# shellcheck disable=SC2086 # Intended splitting of GEN_OPTIONS
+./main $GEN_OPTIONS \
+  --model "$MODEL" \
+  --threads "$N_THREAD" \
+  --n_predict "$N_PREDICTS" \
+  --color --interactive \
+  --file ${PROMPT_FILE} \
+  --reverse-prompt "${USER_NAME}:" \
+  --in-prefix ' ' \
+  "$@"

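Two details of the rewritten llama.sh are worth spelling out. First, the trailing "$@" forwards any extra command-line arguments to ./main after GEN_OPTIONS, which is what the in-script comment means by overriding generation options per invocation (the ./chatLLaMa name in that comment looks like a leftover from the script this block was borrowed from; the entry point here is ./llama.sh). Illustrative runs:

    ./llama.sh --ctx_size 1024
    GEN_OPTIONS="--ctx_size 4096 --temp 0.4" USER_NAME=Alice ./llama.sh

Second, the mktemp/sed pair is a small templating step: it writes a copy of the prompt template with the [[USER_NAME]], [[AI_NAME]], [[DATE_TIME]], and [[DATE_YEAR]] placeholders substituted, and that temporary file is what ./main receives via --file. A template along these lines (illustrative; the repo ships its own ./prompts/chat.txt) shows the effect:

    Text transcript of a dialog between [[USER_NAME]] and an assistant named [[AI_NAME]], recorded at [[DATE_TIME]], [[DATE_YEAR]].
    [[USER_NAME]]: Hello, [[AI_NAME]].

With the defaults above, Matt, LLaMa, and the current time and year are filled in before the chat starts.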