Fix openrouter options
scosman committed Jan 31, 2025
1 parent 453ba88 commit 85fc4e8
Showing 2 changed files with 3 additions and 9 deletions.
@@ -151,6 +151,7 @@ async def _run(self, input: Dict | str) -> RunOutput:
             # Case 1 or 2: Unstructured output, or "Thinking" LLM designed to output thinking in a structured format
             messages.append({"role": "system", "content": cot_prompt})
         elif not thinking_llm and cot_prompt and self.has_structured_output():
+            # Case 3: Normal LLM with structured output
             # Base model (without structured output) used for COT message
             base_model = await self.langchain_model_from()
             messages.append(
@@ -164,8 +165,6 @@ async def _run(self, input: Dict | str) -> RunOutput:
             messages.append(
                 SystemMessage(content="Considering the above, return a final result.")
             )
-        elif cot_prompt:
-            messages.append(SystemMessage(content=cot_prompt))
 
         response = await chain.ainvoke(messages)

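The first file's change adds the missing "Case 3" comment and drops a trailing elif cot_prompt: branch. The two earlier branches already cover every cot_prompt case (unstructured output, thinking LLM with structured output, and normal LLM with structured output), so the trailing branch appears to have been unreachable. Below is a minimal sketch of the resulting dispatch; the function name and parameters are hypothetical stand-ins for the adapter's own state, not the project's actual API, and system messages are simplified to SystemMessage throughout.

from langchain_core.messages import SystemMessage

async def run_with_cot(chain, base_model, messages, cot_prompt, thinking_llm, structured_output):
    if cot_prompt and (thinking_llm or not structured_output):
        # Cases 1 and 2: single call; the COT prompt rides along as a system message
        messages.append(SystemMessage(content=cot_prompt))
    elif cot_prompt and structured_output:
        # Case 3: two calls; the unstructured base model produces the thinking,
        # then the structured chain is asked for the final answer
        messages.append(SystemMessage(content=cot_prompt))
        cot_message = await base_model.ainvoke(messages)
        messages.append(cot_message)
        messages.append(SystemMessage(content="Considering the above, return a final result."))
    # Every cot_prompt case is handled above, so no further elif cot_prompt
    # branch can be reached, which appears to be why the commit removes it.
    return await chain.ainvoke(messages)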
@@ -55,9 +55,7 @@ def __init__(
 
     async def _run(self, input: Dict | str) -> RunOutput:
         provider = await self.model_provider()
-
         intermediate_outputs: dict[str, str] = {}
-
         prompt = await self.build_prompt()
         user_msg = self.prompt_builder.build_user_message(input)
         messages = [
@@ -102,14 +100,11 @@ async def _run(self, input: Dict | str) -> RunOutput:
                 ]
             )
-
-        else:
-            intermediate_outputs = {}
 
         extra_body = {}
-        if self.config.openrouter_style_reasoning:
+        if self.config.openrouter_style_reasoning and thinking_llm:
             extra_body = {
                 "include_reasoning": True,
-                # Only use providers that support the reasoning parameter
+                # Filter to providers that support the reasoning parameter
                 "provider": {
                     "require_parameters": True,
                 },
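The second file carries the fix named in the commit message: the OpenRouter reasoning options are now attached only when the model is a thinking LLM, rather than for every request with openrouter_style_reasoning set. Below is a minimal sketch of how such an extra_body is typically sent through an OpenAI-compatible client; the client wiring, base URL, and model id are illustrative assumptions, not the project's actual code.

from openai import AsyncOpenAI

async def call_openrouter(messages: list[dict], thinking_llm: bool, openrouter_style_reasoning: bool):
    extra_body: dict = {}
    if openrouter_style_reasoning and thinking_llm:
        extra_body = {
            "include_reasoning": True,  # ask OpenRouter to return reasoning tokens
            # Filter to providers that support the reasoning parameter
            "provider": {"require_parameters": True},
        }
    client = AsyncOpenAI(base_url="https://openrouter.ai/api/v1", api_key="...")
    return await client.chat.completions.create(
        model="some/model",  # placeholder model id
        messages=messages,
        extra_body=extra_body,  # the openai SDK merges this dict into the request body
    )

Gating on thinking_llm matters because "provider": {"require_parameters": True} filters the provider pool to those supporting the reasoning parameter; sending it for non-thinking models would needlessly restrict routing.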
