feat: Add suffix support in Ollama completions API in ollama_dart (da…
davidmigloz authored and KennethKnudsen97 committed Oct 1, 2024
1 parent 2e97d1f commit a28f9af
Showing 5 changed files with 46 additions and 13 deletions.
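
For context, the new `suffix` field targets fill-in-the-middle completions: the caller supplies the text before the insertion point as `prompt` and the text after it as `suffix`, and the model generates what goes in between. Below is a minimal sketch of how the field might be exercised through the package's `generateCompletion` API; the base URL, model name, and code fragments are illustrative assumptions, not part of this commit.

```dart
import 'package:ollama_dart/ollama_dart.dart';

Future<void> main() async {
  // Assumes a local Ollama server on its default port.
  final client = OllamaClient(baseUrl: 'http://localhost:11434/api');

  // Fill-in-the-middle: the model completes the gap between `prompt`
  // (text before the insertion point) and the new `suffix` (text after it).
  final res = await client.generateCompletion(
    request: const GenerateCompletionRequest(
      model: 'codellama:7b-code', // illustrative; any infill-capable model
      prompt: 'def fibonacci(n):\n    ',
      suffix: '\n    return result\n',
    ),
  );
  print(res.response);

  client.endSession();
}
```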
@@ -23,6 +23,9 @@ class GenerateCompletionRequest with _$GenerateCompletionRequest {
/// The prompt to generate a response.
required String prompt,

/// The text that comes after the inserted text.
@JsonKey(includeIfNull: false) String? suffix,

/// (optional) a list of Base64-encoded images to include in the message (for multimodal models such as llava)
@JsonKey(includeIfNull: false) List<String>? images,

@@ -74,6 +77,7 @@ class GenerateCompletionRequest with _$GenerateCompletionRequest {
static const List<String> propertyNames = [
'model',
'prompt',
'suffix',
'images',
'system',
'template',
@@ -95,6 +99,7 @@ class GenerateCompletionRequest with _$GenerateCompletionRequest {
return {
'model': model,
'prompt': prompt,
'suffix': suffix,
'images': images,
'system': system,
'template': template,
@@ -29,6 +29,10 @@ mixin _$GenerateCompletionRequest {
/// The prompt to generate a response.
String get prompt => throw _privateConstructorUsedError;

/// The text that comes after the inserted text.
@JsonKey(includeIfNull: false)
String? get suffix => throw _privateConstructorUsedError;

/// (optional) a list of Base64-encoded images to include in the message (for multimodal models such as llava)
@JsonKey(includeIfNull: false)
List<String>? get images => throw _privateConstructorUsedError;
@@ -91,6 +95,7 @@ abstract class $GenerateCompletionRequestCopyWith<$Res> {
$Res call(
{String model,
String prompt,
@JsonKey(includeIfNull: false) String? suffix,
@JsonKey(includeIfNull: false) List<String>? images,
@JsonKey(includeIfNull: false) String? system,
@JsonKey(includeIfNull: false) String? template,
@@ -123,6 +128,7 @@ class _$GenerateCompletionRequestCopyWithImpl<$Res,
$Res call({
Object? model = null,
Object? prompt = null,
Object? suffix = freezed,
Object? images = freezed,
Object? system = freezed,
Object? template = freezed,
@@ -142,6 +148,10 @@ class _$GenerateCompletionRequestCopyWithImpl<$Res,
? _value.prompt
: prompt // ignore: cast_nullable_to_non_nullable
as String,
suffix: freezed == suffix
? _value.suffix
: suffix // ignore: cast_nullable_to_non_nullable
as String?,
images: freezed == images
? _value.images
: images // ignore: cast_nullable_to_non_nullable
@@ -206,6 +216,7 @@ abstract class _$$GenerateCompletionRequestImplCopyWith<$Res>
$Res call(
{String model,
String prompt,
@JsonKey(includeIfNull: false) String? suffix,
@JsonKey(includeIfNull: false) List<String>? images,
@JsonKey(includeIfNull: false) String? system,
@JsonKey(includeIfNull: false) String? template,
@@ -238,6 +249,7 @@ class __$$GenerateCompletionRequestImplCopyWithImpl<$Res>
$Res call({
Object? model = null,
Object? prompt = null,
Object? suffix = freezed,
Object? images = freezed,
Object? system = freezed,
Object? template = freezed,
@@ -257,6 +269,10 @@ class __$$GenerateCompletionRequestImplCopyWithImpl<$Res>
? _value.prompt
: prompt // ignore: cast_nullable_to_non_nullable
as String,
suffix: freezed == suffix
? _value.suffix
: suffix // ignore: cast_nullable_to_non_nullable
as String?,
images: freezed == images
? _value._images
: images // ignore: cast_nullable_to_non_nullable
@@ -303,6 +319,7 @@ class _$GenerateCompletionRequestImpl extends _GenerateCompletionRequest {
const _$GenerateCompletionRequestImpl(
{required this.model,
required this.prompt,
@JsonKey(includeIfNull: false) this.suffix,
@JsonKey(includeIfNull: false) final List<String>? images,
@JsonKey(includeIfNull: false) this.system,
@JsonKey(includeIfNull: false) this.template,
@@ -332,6 +349,11 @@ class _$GenerateCompletionRequestImpl extends _GenerateCompletionRequest {
@override
final String prompt;

/// The text that comes after the inserted text.
@override
@JsonKey(includeIfNull: false)
final String? suffix;

/// (optional) a list of Base64-encoded images to include in the message (for multimodal models such as llava)
final List<String>? _images;

@@ -409,7 +431,7 @@ class _$GenerateCompletionRequestImpl extends _GenerateCompletionRequest {

@override
String toString() {
return 'GenerateCompletionRequest(model: $model, prompt: $prompt, images: $images, system: $system, template: $template, context: $context, options: $options, format: $format, raw: $raw, stream: $stream, keepAlive: $keepAlive)';
return 'GenerateCompletionRequest(model: $model, prompt: $prompt, suffix: $suffix, images: $images, system: $system, template: $template, context: $context, options: $options, format: $format, raw: $raw, stream: $stream, keepAlive: $keepAlive)';
}

@override
@@ -419,6 +441,7 @@ class _$GenerateCompletionRequestImpl extends _GenerateCompletionRequest {
other is _$GenerateCompletionRequestImpl &&
(identical(other.model, model) || other.model == model) &&
(identical(other.prompt, prompt) || other.prompt == prompt) &&
(identical(other.suffix, suffix) || other.suffix == suffix) &&
const DeepCollectionEquality().equals(other._images, _images) &&
(identical(other.system, system) || other.system == system) &&
(identical(other.template, template) ||
@@ -438,6 +461,7 @@ class _$GenerateCompletionRequestImpl extends _GenerateCompletionRequest {
runtimeType,
model,
prompt,
suffix,
const DeepCollectionEquality().hash(_images),
system,
template,
@@ -467,6 +491,7 @@ abstract class _GenerateCompletionRequest extends GenerateCompletionRequest {
const factory _GenerateCompletionRequest(
{required final String model,
required final String prompt,
@JsonKey(includeIfNull: false) final String? suffix,
@JsonKey(includeIfNull: false) final List<String>? images,
@JsonKey(includeIfNull: false) final String? system,
@JsonKey(includeIfNull: false) final String? template,
@@ -497,6 +522,11 @@ abstract class _GenerateCompletionRequest extends GenerateCompletionRequest {
String get prompt;
@override

/// The text that comes after the inserted text.
@JsonKey(includeIfNull: false)
String? get suffix;
@override

/// (optional) a list of Base64-encoded images to include in the message (for multimodal models such as llava)
@JsonKey(includeIfNull: false)
List<String>? get images;
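
As a side note on the generated code above: because `suffix` is annotated with `@JsonKey(includeIfNull: false)`, a null suffix should be left out of the serialized request body, and `copyWith` now accepts a `suffix` argument. The sketch below illustrates both points; the model name and strings are made-up example values.

```dart
import 'package:ollama_dart/ollama_dart.dart';

void main() {
  const request = GenerateCompletionRequest(
    model: 'codellama:7b-code', // illustrative model name
    prompt: 'def add(a, b):',
  );

  // With includeIfNull: false, a null suffix should not appear in the JSON map.
  print(request.toJson().containsKey('suffix')); // expected: false

  // copyWith gained a suffix parameter in this change.
  final withSuffix = request.copyWith(suffix: '\nprint(add(1, 2))\n');
  print(withSuffix.toJson()['suffix']);
}
```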
2 changes: 2 additions & 0 deletions packages/ollama_dart/lib/src/generated/schema/schema.g.dart

Some generated files are not rendered by default.

18 changes: 7 additions & 11 deletions packages/ollama_dart/oas/ollama-curated.yaml
@@ -278,6 +278,9 @@ components:
type: string
description: The prompt to generate a response.
example: Why is the sky blue?
suffix:
type: string
description: The text that comes after the inserted text.
images:
type: array
description: (optional) a list of Base64-encoded images to include in the message (for multimodal models such as llava)
@@ -312,10 +315,10 @@ components:
description: &stream |
If `false` the response will be returned as a single response object, otherwise the response will be streamed as a series of objects.
default: false
keep_alive:
keep_alive: &keep_alive
type: integer
nullable: true
description: &keep_alive |
description: |
How long (in minutes) to keep the model loaded in memory.
- If set to a positive duration (e.g. 20), the model will stay loaded for the provided duration.
@@ -598,10 +601,7 @@ components:
type: boolean
description: *stream
default: false
keep_alive:
type: integer
nullable: true
description: *keep_alive
keep_alive: *keep_alive
required:
- model
- messages
@@ -697,10 +697,7 @@ components:
example: 'Here is an article about llamas...'
options:
$ref: '#/components/schemas/RequestOptions'
keep_alive:
type: integer
nullable: true
description: *keep_alive
keep_alive: *keep_alive
required:
- model
- prompt
@@ -831,7 +828,6 @@ components:
type: integer
nullable: true
description: The number of parameters in the model.

ProcessResponse:
type: object
description: Response class for the list running models endpoint.
2 changes: 1 addition & 1 deletion packages/ollama_dart/pubspec.yaml
@@ -30,5 +30,5 @@ dev_dependencies:
openapi_spec:
git:
url: https://github.com/davidmigloz/openapi_spec.git
ref: 280ae0d41806eda25e923203d67bd6f4992a81e9
ref: 93230a5e346b02789f0f727da8eecea9c7bdf118
test: ^1.25.2
