diff --git a/llm/cli.py b/llm/cli.py
index b3c144bf..1a7519cf 100644
--- a/llm/cli.py
+++ b/llm/cli.py
@@ -521,7 +521,7 @@ def chat(
                 raise click.ClickException(str(ex))
         if prompt.strip() in ("exit", "quit"):
             break
-        response = conversation.prompt(prompt, system, **validated_options)
+        response = conversation.prompt(prompt, system=system, **validated_options)
         # System prompt only sent for the first message:
         system = None
         for chunk in response:
diff --git a/llm/models.py b/llm/models.py
index 06440eed..6364c27e 100644
--- a/llm/models.py
+++ b/llm/models.py
@@ -85,11 +85,18 @@ class Prompt:
     options: "Options" = field(default_factory=dict)
 
     def __init__(
-        self, prompt, model, attachments, system=None, prompt_json=None, options=None
+        self,
+        prompt,
+        model,
+        *,
+        attachments=None,
+        system=None,
+        prompt_json=None,
+        options=None
     ):
         self.prompt = prompt
         self.model = model
-        self.attachments = list(attachments)
+        self.attachments = list(attachments or [])
         self.system = system
         self.prompt_json = prompt_json
         self.options = options or {}
@@ -105,7 +112,8 @@ class Conversation:
     def prompt(
         self,
         prompt: Optional[str],
-        *attachments: Attachment,
+        *,
+        attachments: Attachment = None,
         system: Optional[str] = None,
         stream: bool = True,
         **options
@@ -386,7 +394,8 @@ def execute(
     def prompt(
         self,
         prompt: str,
-        *attachments: Attachment,
+        *,
+        attachments: Attachment = None,
         system: Optional[str] = None,
         stream: bool = True,
         **options
@@ -396,7 +405,7 @@ def prompt(
             raise ValueError(
                 "This model does not support attachments, but some were provided"
             )
-        for attachment in attachments:
+        for attachment in attachments or []:
             attachment_type = attachment.resolve_type()
             if attachment_type not in self.attachment_types:
                 raise ValueError(
diff --git a/tests/test_chat.py b/tests/test_chat.py
index cf7ddeff..01b2a0c0 100644
--- a/tests/test_chat.py
+++ b/tests/test_chat.py
@@ -23,7 +23,10 @@ def test_chat_basic(mock_model, logs_db):
     mock_model.enqueue(["one world"])
     mock_model.enqueue(["one again"])
     result = runner.invoke(
-        llm.cli.cli, ["chat", "-m", "mock"], input="Hi\nHi two\nquit\n"
+        llm.cli.cli,
+        ["chat", "-m", "mock"],
+        input="Hi\nHi two\nquit\n",
+        catch_exceptions=False,
     )
     assert result.exit_code == 0
     assert result.output == (