diff --git a/src/zcmds/cmds/common/askai.py b/src/zcmds/cmds/common/askai.py
index 4a91460..6359df9 100644
--- a/src/zcmds/cmds/common/askai.py
+++ b/src/zcmds/cmds/common/askai.py
@@ -16,7 +16,6 @@
     ADVANCED_MODEL,
     AI_ASSISTANT_AS_PROGRAMMER,
     FAST_MODEL,
-    SLOW_MODEL,
     ChatBot,
     ChatGPTAuthenticationError,
     ChatGPTConnectionError,
@@ -85,10 +84,7 @@ def parse_args() -> argparse.Namespace:
         "--fast",
         action="store_true",
         default=False,
-        help=f"chat gpt 3 turbo: {FAST_MODEL}",
-    )
-    model_group.add_argument(
-        "--slow", action="store_true", default=False, help=f"chat gpt 4: {SLOW_MODEL}"
+        help=f"chat gpt 4o mini: {FAST_MODEL}",
     )
     model_group.add_argument(
         "--advanced",
@@ -127,20 +123,17 @@ def cli() -> int:
     max_tokens = args.max_tokens
     if args.fast:
         args.model = FAST_MODEL
+        max_tokens = 16384
     if args.input_file:
         with open(args.input_file, "r") as file:
            args.prompt = file.read().strip()
     if args.model is None:
-        if args.slow:
-            model = SLOW_MODEL
-            if max_tokens is None:
-                max_tokens = 4096
-        elif args.advanced:
+        if args.advanced:
             model = ADVANCED_MODEL
-            max_tokens = 128000
+            max_tokens = 4096
         else:
             model = FAST_MODEL
-            max_tokens = 4096
+            max_tokens = 16384
     else:
         model = args.model
 
diff --git a/src/zcmds/util/chatgpt.py b/src/zcmds/util/chatgpt.py
index 734bba2..573a100 100644
--- a/src/zcmds/util/chatgpt.py
+++ b/src/zcmds/util/chatgpt.py
@@ -22,9 +22,8 @@
 HIDDEN_PROMPT_TOKEN_COUNT = (
     100  # this hack corrects for the unnaccounted for tokens in the prompt
 )
-ADVANCED_MODEL = "openai/gpt-4o"
-SLOW_MODEL = "openai/gpt-4o"  # now the same as ADVANCED_MODEL because it's so much better at everything
-FAST_MODEL = "gpt-3.5-turbo"
+ADVANCED_MODEL = "gpt-4-turbo"
+FAST_MODEL = "gpt-4o-mini"
 AI_ASSISTANT_AS_PROGRAMMER = (
     "You are a helpful assistant to a senior programmer. "
     "If I am asking how to do something in general then go ahead "
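
Note (not part of the patch): a minimal sketch of how cli() resolves the model and token budget after this change, assuming the new constant values from chatgpt.py. resolve_model is a hypothetical helper written only to illustrate the branching; it does not exist in the codebase.

from typing import Optional, Tuple

ADVANCED_MODEL = "gpt-4-turbo"
FAST_MODEL = "gpt-4o-mini"


def resolve_model(
    model: Optional[str],
    fast: bool = False,
    advanced: bool = False,
    max_tokens: Optional[int] = None,
) -> Tuple[str, Optional[int]]:
    """Mirror the post-patch branching in cli(): returns (model, max_tokens)."""
    if fast:
        # --fast pins the model and the 16k completion budget up front.
        model = FAST_MODEL
        max_tokens = 16384
    if model is None:
        if advanced:
            return ADVANCED_MODEL, 4096
        return FAST_MODEL, 16384
    # An explicit --model (or --fast) wins; max_tokens stays whatever was set.
    return model, max_tokens


# e.g. resolve_model(None)                -> ("gpt-4o-mini", 16384)
#      resolve_model(None, advanced=True) -> ("gpt-4-turbo", 4096)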