diff --git a/litellm/llms/anthropic.py b/litellm/llms/anthropic.py
index 4df032ba0097..f7d056bf9e57 100644
--- a/litellm/llms/anthropic.py
+++ b/litellm/llms/anthropic.py
@@ -105,8 +105,9 @@ def completion(
     optional_params=None,
     litellm_params=None,
     logger_fn=None,
+    headers={},
 ):
-    headers = validate_environment(api_key)
+    headers = { **validate_environment(api_key), **headers }
     if model in custom_prompt_dict:
         # check if the model has a registered custom prompt
         model_prompt_details = custom_prompt_dict[model]
diff --git a/litellm/main.py b/litellm/main.py
index e8022f464f55..1915eb7d0303 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -967,6 +967,7 @@ def completion(
             encoding=encoding,  # for calculating input/output tokens
             api_key=api_key,
             logging_obj=logging,
+            headers=headers,
         )
         if "stream" in optional_params and optional_params["stream"] == True:
             # don't try to access stream object,