import 'dart:async';
import 'dart:convert';

import 'package:http/http.dart' as http;

/// OpenAI API client for Dart
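///
/// A minimal usage sketch; the API key and model name below are placeholders,
/// not values defined in this file:
///
/// ```dart
/// final openai = OpenAI(apiKey: 'sk-...');
/// final completion = await openai.chat.completions.create(
///   model: 'gpt-4o-mini',
///   messages: [ChatMessage.user('Hello!')],
/// ) as ChatCompletion;
/// print(completion.choices.first.message.content);
/// openai.dispose();
/// ```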
class OpenAI {
  final String apiKey;
  final String baseUrl;
  final http.Client _client;

  OpenAI({
    required this.apiKey,
    this.baseUrl = 'https://api.openai.com/v1',
    http.Client? client,
  }) : _client = client ?? http.Client();

  /// Access to chat completions API
  ChatCompletions get chat => ChatCompletions(this);

  /// Access to embeddings API
  Embeddings get embeddings => Embeddings(this);

  /// Closes the underlying HTTP client
  void dispose() {
    _client.close();
  }
}

/// Chat completions API
class ChatCompletions {
  final OpenAI _openai;

  ChatCompletions(this._openai);

  /// Access to completions endpoint
  Completions get completions => Completions(_openai);
}

/// Completions endpoint
class Completions {
  final OpenAI _openai;

  Completions(this._openai);

  /// Create a chat completion
  ///
  /// If [stream] is true, the returned future completes with a
  /// `Stream<ChatCompletionChunk>`; if [stream] is false, it completes with a
  /// single [ChatCompletion]. Callers should cast the result accordingly.
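  ///
  /// A streaming sketch, assuming `openai` is an [OpenAI] instance and the
  /// model name is a placeholder:
  ///
  /// ```dart
  /// final chunks = await openai.chat.completions.create(
  ///   model: 'gpt-4o-mini',
  ///   messages: [ChatMessage.user('Tell me a joke')],
  ///   stream: true,
  /// ) as Stream<ChatCompletionChunk>;
  ///
  /// var text = '';
  /// await for (final chunk in chunks) {
  ///   if (chunk.choices.isEmpty) continue;
  ///   text += chunk.choices.first.delta?.content ?? '';
  /// }
  /// ```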
  Future<dynamic> create({
    required String model,
    required List<dynamic> messages, // ChatMessage objects or raw JSON maps
    bool stream = false,
    StreamOptions? streamOptions,
    double? temperature,
    double? topP,
    int? n,
    List<String>? stop,
    int? maxTokens,
    double? presencePenalty,
    double? frequencyPenalty,
    Map<String, dynamic>? logitBias,
    String? user,
  }) async {
    final body = {
      'model': model,
      'messages': messages,
      'stream': stream,
      if (streamOptions != null) 'stream_options': streamOptions.toJson(),
      if (temperature != null) 'temperature': temperature,
      if (topP != null) 'top_p': topP,
      if (n != null) 'n': n,
      if (stop != null) 'stop': stop,
      if (maxTokens != null) 'max_tokens': maxTokens,
      if (presencePenalty != null) 'presence_penalty': presencePenalty,
      if (frequencyPenalty != null) 'frequency_penalty': frequencyPenalty,
      if (logitBias != null) 'logit_bias': logitBias,
      if (user != null) 'user': user,
    };

    if (stream) {
      return _streamCompletion(body);
    } else {
      return _createCompletion(body);
    }
  }

  Future<ChatCompletion> _createCompletion(Map<String, dynamic> body) async {
    final response = await _openai._client.post(
      Uri.parse('${_openai.baseUrl}/chat/completions'),
      headers: {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ${_openai.apiKey}',
      },
      body: jsonEncode(body),
    );

    if (response.statusCode != 200) {
      throw OpenAIException(
        statusCode: response.statusCode,
        message: response.body,
      );
    }

    return ChatCompletion.fromJson(jsonDecode(response.body));
  }

  Stream<ChatCompletionChunk> _streamCompletion(
      Map<String, dynamic> body) async* {
    final request = http.Request(
      'POST',
      Uri.parse('${_openai.baseUrl}/chat/completions'),
    );

    request.headers.addAll({
      'Content-Type': 'application/json',
      'Authorization': 'Bearer ${_openai.apiKey}',
    });

    request.body = jsonEncode(body);

    final streamedResponse = await _openai._client.send(request);

    if (streamedResponse.statusCode != 200) {
      final errorBody = await streamedResponse.stream.bytesToString();
      throw OpenAIException(
        statusCode: streamedResponse.statusCode,
        message: errorBody,
      );
    }

    // Server-sent events: decode the byte stream and split it into lines.
    final stream = streamedResponse.stream
        .transform(utf8.decoder)
        .transform(const LineSplitter());

    await for (final line in stream) {
      if (line.isEmpty) continue;
      if (line.startsWith(':')) continue; // Skip SSE comments
      if (!line.startsWith('data: ')) continue;

      final data = line.substring(6); // Remove 'data: ' prefix

      if (data == '[DONE]') {
        break;
      }

      try {
        final json = jsonDecode(data);
        yield ChatCompletionChunk.fromJson(json);
      } catch (e) {
        // Skip malformed chunks
        continue;
      }
    }
  }
}

/// Chat message
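///
/// Convenience factories cover the common roles, for example:
///
/// ```dart
/// final messages = [
///   ChatMessage.system('You are a helpful assistant.'),
///   ChatMessage.user('What is the capital of France?'),
/// ];
/// ```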
class ChatMessage {
  final String role;
  final String content;

  ChatMessage({
    required this.role,
    required this.content,
  });

  Map<String, dynamic> toJson() => {
        'role': role,
        'content': content,
      };

  factory ChatMessage.fromJson(Map<String, dynamic> json) => ChatMessage(
        role: json['role'],
        content: json['content'],
      );

  factory ChatMessage.system(String content) =>
      ChatMessage(role: 'system', content: content);

  factory ChatMessage.user(String content) =>
      ChatMessage(role: 'user', content: content);

  factory ChatMessage.assistant(String content) =>
      ChatMessage(role: 'assistant', content: content);
}

/// Stream options
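///
/// A sketch of requesting token usage on the final streamed chunk, assuming
/// `openai` is an [OpenAI] instance (the model name is a placeholder):
///
/// ```dart
/// final chunks = await openai.chat.completions.create(
///   model: 'gpt-4o-mini',
///   messages: [ChatMessage.user('Hi')],
///   stream: true,
///   streamOptions: StreamOptions(includeUsage: true),
/// ) as Stream<ChatCompletionChunk>;
/// await for (final chunk in chunks) {
///   if (chunk.usage != null) print('total: ${chunk.usage!.totalTokens}');
/// }
/// ```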
class StreamOptions {
  final bool includeUsage;

  StreamOptions({this.includeUsage = false});

  Map<String, dynamic> toJson() => {
        'include_usage': includeUsage,
      };
}

/// Chat completion response (non-streaming)
class ChatCompletion {
  final String id;
  final String object;
  final int created;
  final String model;
  final List<Choice> choices;
  final Usage? usage;

  ChatCompletion({
    required this.id,
    required this.object,
    required this.created,
    required this.model,
    required this.choices,
    this.usage,
  });

  factory ChatCompletion.fromJson(Map<String, dynamic> json) => ChatCompletion(
        id: json['id'],
        object: json['object'],
        created: json['created'],
        model: json['model'],
        choices: (json['choices'] as List)
            .map((c) => Choice.fromJson(c))
            .toList(),
        usage: json['usage'] != null ? Usage.fromJson(json['usage']) : null,
      );
}

/// Chat completion chunk (streaming)
class ChatCompletionChunk {
  final String id;
  final String object;
  final int created;
  final String model;
  final List<ChunkChoice> choices;
  final Usage? usage;

  ChatCompletionChunk({
    required this.id,
    required this.object,
    required this.created,
    required this.model,
    required this.choices,
    this.usage,
  });

  factory ChatCompletionChunk.fromJson(Map<String, dynamic> json) =>
      ChatCompletionChunk(
        id: json['id'],
        object: json['object'],
        created: json['created'],
        model: json['model'],
        choices: (json['choices'] as List)
            .map((c) => ChunkChoice.fromJson(c))
            .toList(),
        usage: json['usage'] != null ? Usage.fromJson(json['usage']) : null,
      );
}

/// Choice in non-streaming response
class Choice {
  final int index;
  final ChatMessage message;
  final String? finishReason;

  Choice({
    required this.index,
    required this.message,
    this.finishReason,
  });

  factory Choice.fromJson(Map<String, dynamic> json) => Choice(
        index: json['index'],
        message: ChatMessage.fromJson(json['message']),
        finishReason: json['finish_reason'],
      );
}

/// Choice in streaming response
class ChunkChoice {
  final int index;
  final Delta? delta;
  final String? finishReason;

  ChunkChoice({
    required this.index,
    this.delta,
    this.finishReason,
  });

  factory ChunkChoice.fromJson(Map<String, dynamic> json) => ChunkChoice(
        index: json['index'],
        delta: json['delta'] != null ? Delta.fromJson(json['delta']) : null,
        finishReason: json['finish_reason'],
      );
}

/// Delta content in streaming chunks
class Delta {
  final String? role;
  final String? content;

  Delta({
    this.role,
    this.content,
  });

  factory Delta.fromJson(Map<String, dynamic> json) => Delta(
        role: json['role'],
        content: json['content'],
      );
}

/// Token usage information
class Usage {
  final int? promptTokens;
  final int? completionTokens;
  final int? totalTokens;

  Usage({
    this.promptTokens,
    this.completionTokens,
    this.totalTokens,
  });

  factory Usage.fromJson(Map<String, dynamic> json) => Usage(
        promptTokens: json['prompt_tokens'],
        completionTokens: json['completion_tokens'],
        totalTokens: json['total_tokens'],
      );

  Map<String, dynamic> toJson() => {
        if (promptTokens != null) 'prompt_tokens': promptTokens,
        if (completionTokens != null) 'completion_tokens': completionTokens,
        if (totalTokens != null) 'total_tokens': totalTokens,
      };
}

/// OpenAI API exception
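///
/// A sketch of handling an error response, assuming `openai` is an
/// [OpenAI] instance (the model name is a placeholder):
///
/// ```dart
/// try {
///   await openai.chat.completions.create(
///     model: 'gpt-4o-mini',
///     messages: [ChatMessage.user('Hi')],
///   );
/// } on OpenAIException catch (e) {
///   print('Request failed (${e.statusCode}): ${e.message}');
/// }
/// ```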
class OpenAIException implements Exception {
  final int statusCode;
  final String message;

  OpenAIException({
    required this.statusCode,
    required this.message,
  });

  @override
  String toString() => 'OpenAIException($statusCode): $message';
}

/// Embeddings API
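///
/// A minimal usage sketch, assuming `openai` is an [OpenAI] instance
/// (the model name is a placeholder):
///
/// ```dart
/// final response = await openai.embeddings.create(
///   model: 'text-embedding-3-small',
///   input: 'The quick brown fox',
/// );
/// final vector = response.data.first.embedding; // List<double>
/// ```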
class Embeddings {
  final OpenAI _openai;

  Embeddings(this._openai);

  /// Create embeddings for input text
  ///
  /// [input] may be a single String or a List<String>. Note that this client
  /// only decodes the default 'float' [encodingFormat]; a 'base64' response
  /// is not handled by [Embedding.fromJson].
  Future<EmbeddingResponse> create({
    required String model,
    required dynamic input, // String or List<String>
    String? user,
    String? encodingFormat, // 'float' or 'base64'
    int? dimensions,
  }) async {
    final body = {
      'model': model,
      'input': input,
      if (user != null) 'user': user,
      if (encodingFormat != null) 'encoding_format': encodingFormat,
      if (dimensions != null) 'dimensions': dimensions,
    };

    final response = await _openai._client.post(
      Uri.parse('${_openai.baseUrl}/embeddings'),
      headers: {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ${_openai.apiKey}',
      },
      body: jsonEncode(body),
    );

    if (response.statusCode != 200) {
      throw OpenAIException(
        statusCode: response.statusCode,
        message: response.body,
      );
    }

    return EmbeddingResponse.fromJson(jsonDecode(response.body));
  }
}

/// Embedding response
class EmbeddingResponse {
  final String object;
  final List<Embedding> data;
  final String model;
  final Usage? usage;

  EmbeddingResponse({
    required this.object,
    required this.data,
    required this.model,
    this.usage,
  });

  factory EmbeddingResponse.fromJson(Map<String, dynamic> json) =>
      EmbeddingResponse(
        object: json['object'],
        data: (json['data'] as List).map((e) => Embedding.fromJson(e)).toList(),
        model: json['model'],
        usage: json['usage'] != null ? Usage.fromJson(json['usage']) : null,
      );
}

/// Individual embedding
class Embedding {
  final String object;
  final int index;
  final List<double> embedding;

  Embedding({
    required this.object,
    required this.index,
    required this.embedding,
  });

  factory Embedding.fromJson(Map<String, dynamic> json) => Embedding(
        object: json['object'],
        index: json['index'],
        embedding: (json['embedding'] as List)
            .map<double>((e) => (e as num).toDouble())
            .toList(),
      );
}