From 1124e9c1d6b95bc9f6cf80cd21e8bee5c61e3cb5 Mon Sep 17 00:00:00 2001
From: Robert
Date: Fri, 3 Jan 2025 21:40:27 -0800
Subject: [PATCH] Fixes for API call streaming checks

---
 App_Function_Libraries/LLM_API_Calls.py       |  87 ++++++++++++-
 App_Function_Libraries/LLM_API_Calls_Local.py | 118 ++++++++++++++++--
 2 files changed, 190 insertions(+), 15 deletions(-)

diff --git a/App_Function_Libraries/LLM_API_Calls.py b/App_Function_Libraries/LLM_API_Calls.py
index 1c8a7e3b..e08fa8de 100644
--- a/App_Function_Libraries/LLM_API_Calls.py
+++ b/App_Function_Libraries/LLM_API_Calls.py
@@ -503,6 +503,18 @@ def chat_with_cohere(api_key, input_data, model=None, custom_prompt_arg=None, sy
     logging.debug(f"Cohere Chat: Loaded data: {input_data}")
     logging.debug(f"Cohere Chat: Type of data: {type(input_data)}")
 
+    # Normalize 'streaming' to a bool, then validate, then log the final state.
+    if isinstance(streaming, str):
+        streaming = streaming.lower() == "true"
+    elif isinstance(streaming, int):
+        streaming = bool(streaming)  # Convert integers (1/0) to boolean
+    elif streaming is None:
+        streaming = loaded_config_data.get('cohere_api', {}).get('streaming', False)
+    if not isinstance(streaming, bool):
+        raise ValueError(f"Invalid type for 'streaming': Expected a boolean, got {type(streaming).__name__}")
+    if streaming:
+        logging.debug("Cohere: Streaming mode enabled")
+    else:
+        logging.debug("Cohere: Streaming mode disabled")
     # Ensure model is set
     if not model:
         model = loaded_config_data['cohere_api']['model']
@@ -672,11 +684,18 @@ def chat_with_groq(api_key, input_data, custom_prompt_arg, temp=None, system_mes
 
     logging.debug(f"Groq: Using API Key: {groq_api_key[:5]}...{groq_api_key[-5:]}")
 
-    streaming = loaded_config_data['groq_api']['streaming']
-    if streaming == "true" or "True":
-        streaming = True
-    else:
-        streaming = False
+    if isinstance(streaming, str):
+        streaming = streaming.lower() == "true"
+    elif isinstance(streaming, int):
+        streaming = bool(streaming)  # Convert integers (1/0) to boolean
+    elif streaming is None:
+        streaming = loaded_config_data.get('groq_api', {}).get('streaming', False)
+    if not isinstance(streaming, bool):
+        raise ValueError(f"Invalid type for 'streaming': Expected a boolean, got {type(streaming).__name__}")
+    if streaming:
+        logging.debug("Groq: Streaming mode enabled")
+    else:
+        logging.debug("Groq: Streaming mode disabled")
 
     # Transcript data handling & Validation
     if isinstance(input_data, str) and os.path.isfile(input_data):
         logging.debug("Groq: Loading json data for summarization")
@@ -821,6 +840,18 @@ def chat_with_openrouter(api_key, input_data, custom_prompt_arg, temp=None, syst
     else:
         logging.warning("OpenRouter: No API key found in config file")
 
+    if isinstance(streaming, str):
+        streaming = streaming.lower() == "true"
+    elif isinstance(streaming, int):
+        streaming = bool(streaming)  # Convert integers (1/0) to boolean
+    elif streaming is None:
+        streaming = loaded_config_data.get('openrouter_api', {}).get('streaming', False)
+    if not isinstance(streaming, bool):
+        raise ValueError(f"Invalid type for 'streaming': Expected a boolean, got {type(streaming).__name__}")
+    if streaming:
+        logging.debug("OpenRouter: Streaming mode enabled")
+    else:
+        logging.debug("OpenRouter: Streaming mode disabled")
     # Model Selection validation
     logging.debug("OpenRouter: Validating model selection")
     loaded_config_data = load_and_log_configs()
@@ -995,6 +1026,18 @@ def chat_with_huggingface(api_key, input_data, custom_prompt_arg, system_prompt=
         "Authorization": f"Bearer {huggingface_api_key}"
     }
 
+    if isinstance(streaming, str):
+        streaming = streaming.lower() == "true"
+    elif isinstance(streaming, int):
+        streaming = bool(streaming)  # Convert integers (1/0) to boolean
+    elif streaming is None:
+        streaming = loaded_config_data.get('huggingface_api', {}).get('streaming', False)
+    if not isinstance(streaming, bool):
+        raise ValueError(f"Invalid type for 'streaming': Expected a boolean, got {type(streaming).__name__}")
+    if streaming:
+        logging.debug("HuggingFace: Streaming mode enabled")
+    else:
+        logging.debug("HuggingFace: Streaming mode disabled")
     # Setup model
     huggingface_model = loaded_config_data['huggingface_api']['model']
@@ -1107,6 +1150,18 @@ def chat_with_deepseek(api_key, input_data, custom_prompt_arg, temp=0.1, system_
 
     logging.debug("DeepSeek: Using API Key")
 
+    if isinstance(streaming, str):
+        streaming = streaming.lower() == "true"
+    elif isinstance(streaming, int):
+        streaming = bool(streaming)  # Convert integers (1/0) to boolean
+    elif streaming is None:
+        streaming = loaded_config_data.get('deepseek_api', {}).get('streaming', False)
+    if not isinstance(streaming, bool):
+        raise ValueError(f"Invalid type for 'streaming': Expected a boolean, got {type(streaming).__name__}")
+    if streaming:
+        logging.debug("DeepSeek: Streaming mode enabled")
+    else:
+        logging.debug("DeepSeek: Streaming mode disabled")
     # Input data handling
     if isinstance(input_data, str) and os.path.isfile(input_data):
         logging.debug("DeepSeek: Loading JSON data for summarization")
@@ -1297,6 +1352,18 @@ def chat_with_mistral(api_key, input_data, custom_prompt_arg, temp=None, system_
 
     logging.debug(f"Mistral: Using API Key: {mistral_api_key[:5]}...{mistral_api_key[-5:]}")
 
+    if isinstance(streaming, str):
+        streaming = streaming.lower() == "true"
+    elif isinstance(streaming, int):
+        streaming = bool(streaming)  # Convert integers (1/0) to boolean
+    elif streaming is None:
+        streaming = loaded_config_data.get('mistral_api', {}).get('streaming', False)
+    if not isinstance(streaming, bool):
+        raise ValueError(f"Invalid type for 'streaming': Expected a boolean, got {type(streaming).__name__}")
+    if streaming:
+        logging.debug("Mistral: Streaming mode enabled")
+    else:
+        logging.debug("Mistral: Streaming mode disabled")
     logging.debug("Mistral: Using provided string data")
     data = input_data
 
@@ -1426,6 +1493,18 @@ def chat_with_google(api_key, input_data, custom_prompt_arg, temp=None, system_m
 
     logging.debug(f"Google: Using API Key: {google_api_key[:5]}...{google_api_key[-5:]}")
 
+    if isinstance(streaming, str):
+        streaming = streaming.lower() == "true"
+    elif isinstance(streaming, int):
+        streaming = bool(streaming)  # Convert integers (1/0) to boolean
+    elif streaming is None:
+        streaming = loaded_config_data.get('google_api', {}).get('streaming', False)
+    if not isinstance(streaming, bool):
+        raise ValueError(f"Invalid type for 'streaming': Expected a boolean, got {type(streaming).__name__}")
+    if streaming:
+        logging.debug("Google: Streaming mode enabled")
+    else:
+        logging.debug("Google: Streaming mode disabled")
     # Input data handling
     logging.debug(f"Google: Raw input data type: {type(input_data)}")
     logging.debug(f"Google: Raw input data (first 500 chars): {str(input_data)[:500]}...")
diff --git a/App_Function_Libraries/LLM_API_Calls_Local.py b/App_Function_Libraries/LLM_API_Calls_Local.py
index 058190c4..ae2ed42d 100644
--- a/App_Function_Libraries/LLM_API_Calls_Local.py
+++ b/App_Function_Libraries/LLM_API_Calls_Local.py
@@ -151,6 +151,18 @@ def chat_with_llama(input_data, custom_prompt, temp, api_url="http://127.0.0.1:8
         logging.info("llama.cpp: API URL not found or is empty")
         return "llama.cpp: API URL not found or is empty"
 
+    if isinstance(streaming, str):
+        streaming = streaming.lower() == "true"
+    elif isinstance(streaming, int):
+        streaming = bool(streaming)  # Convert integers (1/0) to boolean
+    elif streaming is None:
+        streaming = loaded_config_data.get('llama_api', {}).get('streaming', False)
+    if not isinstance(streaming, bool):
+        raise ValueError(f"Invalid type for 'streaming': Expected a boolean, got {type(streaming).__name__}")
+    if streaming:
+        logging.debug("Llama.cpp: Streaming mode enabled")
+    else:
+        logging.debug("Llama.cpp: Streaming mode disabled")
     # Prepare headers
     headers = {
         'accept': 'application/json',
@@ -268,6 +280,18 @@ def chat_with_kobold(input_data, api_key, custom_prompt_input, kobold_api_ip="ht
 
     logging.debug(f"Kobold: Using API Key: {kobold_api_key[:5]}...{kobold_api_key[-5:]}")
 
+    if isinstance(streaming, str):
+        streaming = streaming.lower() == "true"
+    elif isinstance(streaming, int):
+        streaming = bool(streaming)  # Convert integers (1/0) to boolean
+    elif streaming is None:
+        streaming = loaded_config_data.get('kobold_api', {}).get('streaming', False)
+    if not isinstance(streaming, bool):
+        raise ValueError(f"Invalid type for 'streaming': Expected a boolean, got {type(streaming).__name__}")
+    if streaming:
+        logging.debug("Kobold.cpp: Streaming mode enabled")
+    else:
+        logging.debug("Kobold.cpp: Streaming mode disabled")
     if isinstance(input_data, str) and os.path.isfile(input_data):
         logging.debug("Kobold.cpp: Loading json data for summarization")
         with open(input_data, 'r') as file:
@@ -429,6 +453,18 @@ def chat_with_oobabooga(input_data, api_key, custom_prompt, api_url="http://127.
     if system_prompt is None:
         system_prompt = "You are a helpful AI assistant that provides accurate and concise information."
 
+    if isinstance(streaming, str):
+        streaming = streaming.lower() == "true"
+    elif isinstance(streaming, int):
+        streaming = bool(streaming)  # Convert integers (1/0) to boolean
+    elif streaming is None:
+        streaming = loaded_config_data.get('ooba_api', {}).get('streaming', False)
+    if not isinstance(streaming, bool):
+        raise ValueError(f"Invalid type for 'streaming': Expected a boolean, got {type(streaming).__name__}")
+    if streaming:
+        logging.debug("Oobabooga: Streaming mode enabled")
+    else:
+        logging.debug("Oobabooga: Streaming mode disabled")
     headers = {
         'accept': 'application/json',
         'content-type': 'application/json',
@@ -546,6 +582,18 @@ def chat_with_tabbyapi(
     else:
         logging.warning("TabbyAPI: No API key found in config file")
 
+    if isinstance(streaming, str):
+        streaming = streaming.lower() == "true"
+    elif isinstance(streaming, int):
+        streaming = bool(streaming)  # Convert integers (1/0) to boolean
+    elif streaming is None:
+        streaming = loaded_config_data.get('tabby_api', {}).get('streaming', False)
+    if not isinstance(streaming, bool):
+        raise ValueError(f"Invalid type for 'streaming': Expected a boolean, got {type(streaming).__name__}")
+    if streaming:
+        logging.debug("TabbyAPI: Streaming mode enabled")
+    else:
+        logging.debug("TabbyAPI: Streaming mode disabled")
     # Set API IP and model from config.txt
     tabby_api_ip = loaded_config_data['tabby_api']['api_ip']
     tabby_model = loaded_config_data['tabby_api']['model']
@@ -653,7 +701,7 @@ def chat_with_tabbyapi(
 
 # FIXME aphrodite engine - code was literally tab complete in one go from copilot... :/
-def chat_with_aphrodite(input_data, custom_prompt_input, api_key=None, api_IP="http://127.0.0.1:8080/completion"):
+def chat_with_aphrodite(input_data, custom_prompt_input, api_key=None, api_IP="http://127.0.0.1:8080/completion", streaming=False):
     loaded_config_data = load_and_log_configs()
     model = loaded_config_data['aphrodite_api']['model']
     # API key validation
@@ -665,6 +713,18 @@ def chat_with_aphrodite(input_data, custom_prompt_input, api_key=None, api_IP="h
     if api_key is None or api_key.strip() == "":
         logging.info("aphrodite: API key not found or is empty")
 
+    if isinstance(streaming, str):
+        streaming = streaming.lower() == "true"
+    elif isinstance(streaming, int):
+        streaming = bool(streaming)  # Convert integers (1/0) to boolean
+    elif streaming is None:
+        streaming = loaded_config_data.get('aphrodite_api', {}).get('streaming', False)
+    if not isinstance(streaming, bool):
+        raise ValueError(f"Invalid type for 'streaming': Expected a boolean, got {type(streaming).__name__}")
+    if streaming:
+        logging.debug("Aphrodite: Streaming mode enabled")
+    else:
+        logging.debug("Aphrodite: Streaming mode disabled")
     headers = {
         'Authorization': f'Bearer {api_key}',
         'Content-Type': 'application/json'
@@ -713,19 +773,31 @@ def chat_with_ollama(
     else:
         logging.warning("Ollama: No API key found in config file")
 
-    # Set model from parameter or config
+    # Set model from parameter or config
+    if model is None:
+        model = loaded_config_data['ollama_api'].get('model')
     if model is None:
-        model = loaded_config_data['ollama_api'].get('model')
-        if model is None:
-            logging.error("Ollama: Model not found in config file")
-            return "Ollama: Model not found in config file"
+        logging.error("Ollama: Model not found in config file")
+        return "Ollama: Model not found in config file"
 
-    # Set api_url from parameter or config
+    # Set api_url from parameter or config
+    if api_url is None:
+        api_url = loaded_config_data['ollama_api'].get('api_ip')
     if api_url is None:
-        api_url = loaded_config_data['ollama_api'].get('api_ip')
-        if api_url is None:
-            logging.error("Ollama: API URL not found in config file")
-            return "Ollama: API URL not found in config file"
+        logging.error("Ollama: API URL not found in config file")
+        return "Ollama: API URL not found in config file"
+
+    if isinstance(streaming, str):
+        streaming = streaming.lower() == "true"
+    elif isinstance(streaming, int):
+        streaming = bool(streaming)  # Convert integers (1/0) to boolean
+    elif streaming is None:
+        streaming = loaded_config_data.get('ollama_api', {}).get('streaming', False)
+    if not isinstance(streaming, bool):
+        raise ValueError(f"Invalid type for 'streaming': Expected a boolean, got {type(streaming).__name__}")
+    if streaming:
+        logging.debug("Ollama: Streaming mode enabled")
+    else:
+        logging.debug("Ollama: Streaming mode disabled")
 
     # Load transcript
     logging.debug("Ollama: Loading JSON data")
@@ -921,6 +993,18 @@ def chat_with_vllm(
     else:
         logging.error("vLLM: API URL not found in config file")
 
+    if isinstance(streaming, str):
+        streaming = streaming.lower() == "true"
+    elif isinstance(streaming, int):
+        streaming = bool(streaming)  # Convert integers (1/0) to boolean
+    elif streaming is None:
+        streaming = loaded_config_data.get('vllm_api', {}).get('streaming', False)
+    if not isinstance(streaming, bool):
+        raise ValueError(f"Invalid type for 'streaming': Expected a boolean, got {type(streaming).__name__}")
+    if streaming:
+        logging.debug("vllm: Streaming mode enabled")
+    else:
+        logging.debug("vllm: Streaming mode disabled")
     logging.debug(f"vLLM: Using API Key: {vllm_api_key[:5]}...{vllm_api_key[-5:] if vllm_api_key else 'None'}")
 
     # Process input data
@@ -1023,6 +1107,18 @@ def chat_with_custom_openai(api_key, input_data, custom_prompt_arg, temp=None, s
 
     logging.debug(f"Custom OpenAI API: Using API Key: {custom_openai_api_key[:5]}...{custom_openai_api_key[-5:]}")
 
+    if isinstance(streaming, str):
+        streaming = streaming.lower() == "true"
+    elif isinstance(streaming, int):
+        streaming = bool(streaming)  # Convert integers (1/0) to boolean
+    elif streaming is None:
+        streaming = loaded_config_data.get('custom_openai_api', {}).get('streaming', False)
+    if not isinstance(streaming, bool):
+        raise ValueError(f"Invalid type for 'streaming': Expected a boolean, got {type(streaming).__name__}")
+    if streaming:
+        logging.debug("Custom OpenAI API: Streaming mode enabled")
+    else:
+        logging.debug("Custom OpenAI API: Streaming mode disabled")
     # Input data handling
     logging.debug(f"Custom OpenAI API: Raw input data type: {type(input_data)}")
     logging.debug(f"Custom OpenAI API: Raw input data (first 500 chars): {str(input_data)[:500]}...")