Add feed aggregation and embedding generation features
This commit is contained in:
268
lib/utils/agrigator.dart
Normal file
268
lib/utils/agrigator.dart
Normal file
@@ -0,0 +1,268 @@
|
||||
import 'dart:math';
|
||||
|
||||
import 'package:http/http.dart' as http;
|
||||
import 'package:xml/xml.dart';
|
||||
import 'openai.dart';
|
||||
|
||||
// Reference text used to score feed items for business relevance: a single
// space-separated bag of business/finance keywords. It is embedded once via
// generateKeywordEmbeddings() and compared against per-item embeddings in
// isFeedItemRelevant().
// NOTE(review): SCREAMING_CAPS violates Dart's lowerCamelCase constant
// convention, but the name may be referenced from other files — left as-is.
const String KEYWORDS = "Business news corporate earnings revenue profit stock market trading equity shares NYSE NASDAQ stock prices quarterly results annual reports CEO announcements executive leadership management changes board directors company strategy mergers acquisitions takeovers buyouts partnerships joint ventures business deals IPO initial public offerings venture capital funding investment rounds valuation startup unicorn enterprise technology product launches innovation R&D research development market expansion international business global markets trade agreements tariffs import export supply chain logistics manufacturing production operations facilities factories plants workforce hiring layoffs restructuring downsizing labor unions strikes employee relations workplace compensation benefits corporate governance shareholder activism proxy fights dividends stock buybacks analyst ratings price targets market capitalization revenue growth profit margins EBITDA cash flow debt financing credit ratings bonds corporate strategy competitive advantage market share industry trends sector analysis retail consumer goods e-commerce technology software hardware semiconductors pharmaceuticals biotech healthcare energy oil gas renewables automotive electric vehicles aerospace defense banking financial services insurance real estate construction infrastructure telecommunications media entertainment streaming gaming hospitality travel transportation logistics shipping airlines regulatory compliance antitrust competition policy lawsuits litigation settlements data breaches cybersecurity intellectual property patents trademarks brand value customer acquisition market positioning business models revenue streams profitability sustainability ESG environmental social governance";
|
||||
// Cached embedding vector for [KEYWORDS]; null until
// generateKeywordEmbeddings() has completed successfully.
List<double>? KEYWORD_EMBEDDINGS;
|
||||
|
||||
/// A single entry parsed from an RSS or Atom feed.
class FeedItem {
  final String title;
  final String description;
  final String link;

  /// Embedding vector for this item; null until one has been generated.
  List<double>? embedding;

  FeedItem({
    required this.title,
    required this.description,
    required this.link,
    this.embedding,
  });

  @override
  String toString() => "FeedItem(title: $title, link: $link)";

  /// Restores an item from its [toJson] representation.
  ///
  /// Numeric embedding entries may arrive as int or double from JSON, so
  /// each element is normalized to double.
  FeedItem.fromJson(Map<String, dynamic> json)
      : title = json["title"],
        description = json["description"],
        link = json["link"],
        embedding = (json["embedding"] as List?)
            ?.map<double>((e) => (e as num).toDouble())
            .toList();

  /// Serializes the item; the embedding key is omitted entirely when unset.
  Map<String, dynamic> toJson() => {
        "title": title,
        "description": description,
        "link": link,
        if (embedding != null) "embedding": embedding,
      };
}
|
||||
|
||||
/// Parses an RSS 2.0 document into a list of [FeedItem]s.
///
/// Missing titles fall back to "Untitled"; a missing link or description
/// falls back to the empty string.
List<FeedItem> parseRssFeed(String rssXml) {
  final doc = XmlDocument.parse(rssXml);

  final parsed = <FeedItem>[];
  // Each <item> element in the RSS structure describes one feed entry.
  for (final node in doc.findAllElements("item")) {
    parsed.add(FeedItem(
      title: node.findElements("title").firstOrNull?.innerText.trim() ?? "Untitled",
      link: node.findElements("link").firstOrNull?.innerText ?? "",
      description: node.findElements("description").firstOrNull?.innerText.trim() ?? "",
    ));
  }
  return parsed;
}
|
||||
|
||||
/// Parses an Atom document into a list of [FeedItem]s.
///
/// The link comes from the first <link> element's `href` attribute; the
/// description prefers <summary> and falls back to <content>, then "".
List<FeedItem> parseAtomFeed(String atomXml) {
  final doc = XmlDocument.parse(atomXml);

  return [
    for (final entry in doc.findAllElements("entry"))
      FeedItem(
        title: entry.findElements("title").firstOrNull?.innerText.trim() ?? "Untitled",
        link: entry.findElements("link").firstOrNull?.getAttribute("href") ?? "",
        description: (entry.findElements("summary").firstOrNull?.innerText.trim() ??
                entry.findElements("content").firstOrNull?.innerText.trim() ??
                "")
            .trim(),
      ),
  ];
}
|
||||
|
||||
/// Dispatches [feedXml] to the matching parser based on its root elements.
///
/// Throws [FormatException] when the document is neither Atom nor RSS.
List<FeedItem> parseFeed(String feedXml) {
  final doc = XmlDocument.parse(feedXml);

  // Atom documents use a top-level <feed> element.
  if (doc.findAllElements('feed').isNotEmpty) {
    return parseAtomFeed(feedXml);
  }

  // RSS documents carry an <rss> root (or at least a <channel>).
  final looksLikeRss = doc.findAllElements('rss').isNotEmpty ||
      doc.findAllElements('channel').isNotEmpty;
  if (looksLikeRss) {
    return parseRssFeed(feedXml);
  }

  throw FormatException('Unknown feed format. Expected RSS or Atom.');
}
|
||||
|
||||
/// Downloads the feed at [feedUri] and parses it into [FeedItem]s.
///
/// Throws when the HTTP status is anything other than 200.
Future<List<FeedItem>> fetchFeed(Uri feedUri) async {
  final res = await http.get(feedUri);

  if (res.statusCode != 200) {
    throw Exception("Failed to fetch feed: ${res.statusCode}");
  }

  // Hand the raw XML body to the format-detecting parser.
  return parseFeed(res.body);
}
|
||||
|
||||
/// Fetches all [feedUris] concurrently and concatenates their items.
///
/// A feed that fails to download or parse is logged and contributes no
/// items; it does not abort the other fetches.
Future<List<FeedItem>> fetchFeeds(List<Uri> feedUris) async {
  final perFeed = await Future.wait(
    feedUris.map(
      (uri) => fetchFeed(uri).catchError((e) {
        print("Error fetching feed $uri: $e");
        return <FeedItem>[];
      }),
    ),
  );

  // Flatten the per-feed result lists into one combined list.
  return [for (final items in perFeed) ...items];
}
|
||||
|
||||
|
||||
/// Generates an embedding for [item] and stores it on the item in place.
///
/// The embedded text is the item's title and description concatenated.
/// API errors are logged and swallowed, leaving the embedding unset; the
/// OpenAI client is always disposed.
/// NOTE(review): a fresh client is created per item — batching via a
/// single client/request would be cheaper; confirm before changing.
Future<void> generateEmbedding(FeedItem item, String apiKey) async {
  final client = OpenAI(apiKey: apiKey);

  try {
    final result = await client.embeddings.create(
      model: "text-embedding-3-small",
      input: "${item.title} ${item.description}",
    );

    if (result.data.isNotEmpty) {
      item.embedding = result.data.first.embedding;
    }
  } catch (e) {
    print("Error generatng embedding: $e");
  } finally {
    client.dispose();
  }
}
|
||||
|
||||
/// Generates embeddings for every item in [items] concurrently.
Future<void> generateEmbeddings(List<FeedItem> items, String apiKey) async {
  await Future.wait(
    [for (final item in items) generateEmbedding(item, apiKey)],
  );
}
|
||||
|
||||
/// Lazily computes and caches the embedding for [KEYWORDS].
///
/// Returns immediately once [KEYWORD_EMBEDDINGS] is populated. API
/// failures are logged and swallowed, leaving the cache empty so a later
/// call can retry. The OpenAI client is always disposed.
Future<void> generateKeywordEmbeddings(String apiKey) async {
  if (KEYWORD_EMBEDDINGS != null) {
    return; // already generated
  }

  final client = OpenAI(apiKey: apiKey);

  try {
    final result = await client.embeddings.create(
      model: "text-embedding-3-small",
      input: KEYWORDS,
    );

    if (result.data.isNotEmpty) {
      KEYWORD_EMBEDDINGS = result.data.first.embedding;
    }
  } catch (e) {
    print("Error generating keyword embeddings: $e");
  } finally {
    client.dispose();
  }
}
|
||||
|
||||
/// Whether [item] is similar enough to the business keyword set.
///
/// Compares the item's embedding against [KEYWORD_EMBEDDINGS] by cosine
/// similarity; throws when either embedding is missing.
bool isFeedItemRelevant(FeedItem item, [double threshold = 0.25]) {
  final itemVec = item.embedding;
  final keywordVec = KEYWORD_EMBEDDINGS;
  if (itemVec == null || keywordVec == null) {
    throw Exception("Embeddings not available for comparison.");
  }

  return cosineSimilarity(itemVec, keywordVec) >= threshold;
}
|
||||
|
||||
/// Computes the cosine similarity between [vecA] and [vecB].
///
/// Returns a value in [-1, 1]; returns 0.0 when either vector has zero
/// magnitude. Throws [ArgumentError] when the lengths differ.
double cosineSimilarity(List<double> vecA, List<double> vecB) {
  if (vecA.length != vecB.length) {
    throw ArgumentError("Vectors must be of the same length");
  }

  var dot = 0.0;
  var normA = 0.0;
  var normB = 0.0;

  for (var k = 0; k < vecA.length; k++) {
    final a = vecA[k];
    final b = vecB[k];
    dot += a * b;
    normA += a * a;
    normB += b * b;
  }

  // A zero vector has no direction; treat it as maximally dissimilar.
  if (normA == 0 || normB == 0) {
    return 0.0;
  }

  return dot / (sqrt(normA) * sqrt(normB));
}
|
||||
|
||||
/// Groups [items] into clusters of articles that appear to cover the same
/// event, using pairwise cosine similarity of their embeddings.
///
/// Every item must already have a non-null embedding (null embeddings
/// cause a runtime error via the `!` assertions below).
///
/// Each item ends up in exactly one group. An item joins the first anchor
/// that matches it at or above [similarityThreshold], and is relocated if
/// a later anchor matches it more strongly than its current group does.
///
/// Bug fix: previously an item that had already joined an earlier group
/// would still start its own new group without leaving the old one, so it
/// could appear in two groups at once. Already-grouped items no longer
/// anchor a new group.
List<List<FeedItem>> groupFeedItemsByEvent(List<FeedItem> items, [double similarityThreshold = 0.7]) {
  // For each item index: the group it currently belongs to, plus the
  // similarity that placed it there (anchors use 1.0, the maximum).
  Map<int, ({int groupIndex, double similarity})> itemGrouping = {};
  List<List<FeedItem>> groupedItems = [];

  for (int i = 0; i < items.length; i++) {
    // An item already claimed by an earlier anchor stays where it is;
    // letting it anchor a second group would duplicate it across groups.
    if (itemGrouping.containsKey(i)) continue;

    // Start a new group with item i as the anchor.
    List<FeedItem> currentGroup = [items[i]];
    int currentGroupIndex = groupedItems.length;
    itemGrouping[i] = (groupIndex: currentGroupIndex, similarity: 1.0);

    // Pull in all later items that match the anchor closely enough.
    for (int j = i + 1; j < items.length; j++) {
      double similarity = cosineSimilarity(
        items[i].embedding!,
        items[j].embedding!,
      );

      if (similarity < similarityThreshold) continue;

      final existing = itemGrouping[j];
      if (existing == null) {
        // j hasn't been grouped yet: adopt it.
        currentGroup.add(items[j]);
        itemGrouping[j] = (groupIndex: currentGroupIndex, similarity: similarity);
      } else if (similarity > existing.similarity) {
        // j matches this anchor better than its current group: relocate.
        groupedItems[existing.groupIndex].remove(items[j]);
        currentGroup.add(items[j]);
        itemGrouping[j] = (groupIndex: currentGroupIndex, similarity: similarity);
      }
    }

    groupedItems.add(currentGroup);
  }

  // Relocation may have emptied earlier groups; drop them.
  return groupedItems.where((group) => group.isNotEmpty).toList();
}
|
||||
434
lib/utils/openai.dart
Normal file
434
lib/utils/openai.dart
Normal file
@@ -0,0 +1,434 @@
|
||||
import 'dart:async';
|
||||
import 'dart:convert';
|
||||
import 'package:http/http.dart' as http;
|
||||
|
||||
/// Minimal OpenAI REST API client for Dart.
///
/// Holds the API key, base URL, and a reusable HTTP client. Call
/// [dispose] when finished to release the underlying connection pool.
class OpenAI {
  final String apiKey;
  final String baseUrl;
  final http.Client _client;

  /// [client] may be injected (e.g. a mock in tests); otherwise a fresh
  /// one is created.
  OpenAI({
    required this.apiKey,
    this.baseUrl = 'https://api.openai.com/v1',
    http.Client? client,
  }) : _client = client ?? http.Client();

  /// Access to chat completions API.
  ChatCompletions get chat => ChatCompletions(this);

  /// Access to embeddings API.
  Embeddings get embeddings => Embeddings(this);

  /// Closes the underlying HTTP client.
  void dispose() => _client.close();
}
|
||||
|
||||
/// Chat API surface; mirrors the REST path segment `chat/`.
class ChatCompletions {
  final OpenAI _openai;

  ChatCompletions(this._openai);

  /// The completions endpoint under `chat/`.
  Completions get completions => Completions(_openai);
}
|
||||
|
||||
/// Completions endpoint (`POST {baseUrl}/chat/completions`).
///
/// Supports both a buffered JSON response and a server-sent-event (SSE)
/// streaming response.
class Completions {
  final OpenAI _openai;

  Completions(this._openai);

  /// Create a chat completion.
  ///
  /// If [stream] is true, returns a Stream of ChatCompletionChunk;
  /// if [stream] is false, returns a single ChatCompletion.
  ///
  /// Optional parameters left null are omitted from the request body, so
  /// the server's own defaults apply.
  ///
  /// NOTE(review): the declared return type is `Future<dynamic>`; in the
  /// streaming case callers receive a Future that completes with the
  /// Stream itself (not the chunks) — confirm call sites await it once.
  Future<dynamic> create({
    required String model,
    required List<dynamic> messages,
    bool stream = false,
    StreamOptions? streamOptions,
    double? temperature,
    double? topP,
    int? n,
    List<String>? stop,
    int? maxTokens,
    double? presencePenalty,
    double? frequencyPenalty,
    Map<String, dynamic>? logitBias,
    String? user,
  }) async {
    // Collection-if keeps unset options out of the JSON entirely.
    final body = {
      'model': model,
      'messages': messages,
      'stream': stream,
      if (streamOptions != null) 'stream_options': streamOptions.toJson(),
      if (temperature != null) 'temperature': temperature,
      if (topP != null) 'top_p': topP,
      if (n != null) 'n': n,
      if (stop != null) 'stop': stop,
      if (maxTokens != null) 'max_tokens': maxTokens,
      if (presencePenalty != null) 'presence_penalty': presencePenalty,
      if (frequencyPenalty != null) 'frequency_penalty': frequencyPenalty,
      if (logitBias != null) 'logit_bias': logitBias,
      if (user != null) 'user': user,
    };

    if (stream) {
      return _streamCompletion(body);
    } else {
      return _createCompletion(body);
    }
  }

  /// Sends a buffered request and decodes the full JSON response.
  ///
  /// Throws [OpenAIException] on any non-200 status.
  Future<ChatCompletion> _createCompletion(Map<String, dynamic> body) async {
    final response = await _openai._client.post(
      Uri.parse('${_openai.baseUrl}/chat/completions'),
      headers: {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ${_openai.apiKey}',
      },
      body: jsonEncode(body),
    );

    if (response.statusCode != 200) {
      throw OpenAIException(
        statusCode: response.statusCode,
        message: response.body,
      );
    }

    return ChatCompletion.fromJson(jsonDecode(response.body));
  }

  /// Sends a streaming request and yields chunks parsed from the SSE
  /// `data: ...` lines of the response.
  ///
  /// Throws [OpenAIException] on any non-200 status. Lines that are empty,
  /// SSE comments, or malformed JSON are skipped; the stream ends at the
  /// literal `[DONE]` sentinel.
  Stream<ChatCompletionChunk> _streamCompletion(
      Map<String, dynamic> body) async* {
    final request = http.Request(
      'POST',
      Uri.parse('${_openai.baseUrl}/chat/completions'),
    );

    request.headers.addAll({
      'Content-Type': 'application/json',
      'Authorization': 'Bearer ${_openai.apiKey}',
    });

    request.body = jsonEncode(body);

    final streamedResponse = await _openai._client.send(request);

    if (streamedResponse.statusCode != 200) {
      // Drain the error body so the exception message is useful.
      final body = await streamedResponse.stream.bytesToString();
      throw OpenAIException(
        statusCode: streamedResponse.statusCode,
        message: body,
      );
    }

    // Decode bytes to text and split on newlines: SSE is line-oriented.
    final stream = streamedResponse.stream
        .transform(utf8.decoder)
        .transform(const LineSplitter());

    await for (final line in stream) {
      if (line.isEmpty) continue;
      if (line.startsWith(':')) continue; // Skip SSE comment lines
      if (!line.startsWith('data: ')) continue;

      final data = line.substring(6); // Remove 'data: ' prefix

      // The API signals end-of-stream with a literal '[DONE]' payload.
      if (data == '[DONE]') {
        break;
      }

      try {
        final json = jsonDecode(data);
        yield ChatCompletionChunk.fromJson(json);
      } catch (e) {
        // Skip malformed chunks rather than killing the whole stream.
        continue;
      }
    }
  }
}
|
||||
|
||||
/// A single message in a chat conversation.
class ChatMessage {
  /// The speaker role; the named factories produce 'system', 'user', and
  /// 'assistant'.
  final String role;
  final String content;

  ChatMessage({
    required this.role,
    required this.content,
  });

  /// Serializes to the API's wire format.
  Map<String, dynamic> toJson() => {
        'role': role,
        'content': content,
      };

  factory ChatMessage.fromJson(Map<String, dynamic> json) =>
      ChatMessage(role: json['role'], content: json['content']);

  /// A message with the 'system' role.
  factory ChatMessage.system(String content) =>
      ChatMessage(role: 'system', content: content);

  /// A message with the 'user' role.
  factory ChatMessage.user(String content) =>
      ChatMessage(role: 'user', content: content);

  /// A message with the 'assistant' role.
  factory ChatMessage.assistant(String content) =>
      ChatMessage(role: 'assistant', content: content);
}
|
||||
|
||||
/// Options controlling streaming completion requests.
class StreamOptions {
  /// Whether to request token usage reporting for the stream.
  final bool includeUsage;

  StreamOptions({this.includeUsage = false});

  /// Serializes to the API's snake_case wire format.
  Map<String, dynamic> toJson() => {'include_usage': includeUsage};
}
|
||||
|
||||
/// A complete (non-streamed) chat completion response.
class ChatCompletion {
  final String id;
  final String object;
  final int created;
  final String model;
  final List<Choice> choices;
  final Usage? usage;

  ChatCompletion({
    required this.id,
    required this.object,
    required this.created,
    required this.model,
    required this.choices,
    this.usage,
  });

  factory ChatCompletion.fromJson(Map<String, dynamic> json) {
    final rawUsage = json['usage'];
    return ChatCompletion(
      id: json['id'],
      object: json['object'],
      created: json['created'],
      model: json['model'],
      choices: [for (final c in json['choices'] as List) Choice.fromJson(c)],
      usage: rawUsage == null ? null : Usage.fromJson(rawUsage),
    );
  }
}
|
||||
|
||||
/// One incremental chunk of a streamed chat completion.
class ChatCompletionChunk {
  final String id;
  final String object;
  final int created;
  final String model;
  final List<ChunkChoice> choices;

  /// Token usage, when the server includes it in the chunk.
  final Usage? usage;

  ChatCompletionChunk({
    required this.id,
    required this.object,
    required this.created,
    required this.model,
    required this.choices,
    this.usage,
  });

  factory ChatCompletionChunk.fromJson(Map<String, dynamic> json) {
    final rawUsage = json['usage'];
    return ChatCompletionChunk(
      id: json['id'],
      object: json['object'],
      created: json['created'],
      model: json['model'],
      choices: [
        for (final c in json['choices'] as List) ChunkChoice.fromJson(c)
      ],
      usage: rawUsage == null ? null : Usage.fromJson(rawUsage),
    );
  }
}
|
||||
|
||||
/// One candidate message in a non-streaming response.
class Choice {
  final int index;
  final ChatMessage message;
  final String? finishReason;

  Choice({
    required this.index,
    required this.message,
    this.finishReason,
  });

  factory Choice.fromJson(Map<String, dynamic> json) {
    return Choice(
      index: json['index'],
      message: ChatMessage.fromJson(json['message']),
      finishReason: json['finish_reason'],
    );
  }
}
|
||||
|
||||
/// One candidate delta in a streaming response.
class ChunkChoice {
  final int index;
  final Delta? delta;
  final String? finishReason;

  ChunkChoice({
    required this.index,
    this.delta,
    this.finishReason,
  });

  factory ChunkChoice.fromJson(Map<String, dynamic> json) {
    final rawDelta = json['delta'];
    return ChunkChoice(
      index: json['index'],
      delta: rawDelta == null ? null : Delta.fromJson(rawDelta),
      finishReason: json['finish_reason'],
    );
  }
}
|
||||
|
||||
/// Incremental message content carried by a streaming chunk.
class Delta {
  final String? role;
  final String? content;

  Delta({this.role, this.content});

  factory Delta.fromJson(Map<String, dynamic> json) =>
      Delta(role: json['role'], content: json['content']);
}
|
||||
|
||||
/// Token accounting reported by the API.
class Usage {
  final int? promptTokens;
  final int? completionTokens;
  final int? totalTokens;

  Usage({
    this.promptTokens,
    this.completionTokens,
    this.totalTokens,
  });

  factory Usage.fromJson(Map<String, dynamic> json) => Usage(
        promptTokens: json['prompt_tokens'],
        completionTokens: json['completion_tokens'],
        totalTokens: json['total_tokens'],
      );

  /// Serializes back to API field names, omitting unset counters.
  Map<String, dynamic> toJson() => {
        if (promptTokens != null) 'prompt_tokens': promptTokens,
        if (completionTokens != null) 'completion_tokens': completionTokens,
        if (totalTokens != null) 'total_tokens': totalTokens,
      };
}
|
||||
|
||||
/// Raised when the OpenAI API returns a non-success HTTP status.
class OpenAIException implements Exception {
  /// HTTP status code of the failed response.
  final int statusCode;

  /// Raw response body describing the failure.
  final String message;

  OpenAIException({
    required this.statusCode,
    required this.message,
  });

  @override
  String toString() => 'OpenAIException($statusCode): $message';
}
|
||||
|
||||
/// Embeddings endpoint (`POST {baseUrl}/embeddings`).
class Embeddings {
  final OpenAI _openai;

  Embeddings(this._openai);

  /// Create embeddings for input text.
  ///
  /// [input] may be a single String or a List<String>; optional fields
  /// left null are omitted from the request body. Throws
  /// [OpenAIException] on any non-200 response.
  Future<EmbeddingResponse> create({
    required String model,
    required dynamic input, // String or List<String>
    String? user,
    String? encodingFormat, // 'float' or 'base64'
    int? dimensions,
  }) async {
    final payload = <String, dynamic>{
      'model': model,
      'input': input,
      if (user != null) 'user': user,
      if (encodingFormat != null) 'encoding_format': encodingFormat,
      if (dimensions != null) 'dimensions': dimensions,
    };

    final res = await _openai._client.post(
      Uri.parse('${_openai.baseUrl}/embeddings'),
      headers: {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ${_openai.apiKey}',
      },
      body: jsonEncode(payload),
    );

    if (res.statusCode != 200) {
      throw OpenAIException(
        statusCode: res.statusCode,
        message: res.body,
      );
    }

    return EmbeddingResponse.fromJson(jsonDecode(res.body));
  }
}
|
||||
|
||||
/// Top-level payload returned by the embeddings endpoint.
class EmbeddingResponse {
  final String object;
  final List<Embedding> data;
  final String model;
  final Usage? usage;

  EmbeddingResponse({
    required this.object,
    required this.data,
    required this.model,
    this.usage,
  });

  factory EmbeddingResponse.fromJson(Map<String, dynamic> json) {
    final rawUsage = json['usage'];
    return EmbeddingResponse(
      object: json['object'],
      data: [for (final e in json['data'] as List) Embedding.fromJson(e)],
      model: json['model'],
      usage: rawUsage == null ? null : Usage.fromJson(rawUsage),
    );
  }
}
|
||||
|
||||
/// A single embedding vector and its position in the request batch.
class Embedding {
  final String object;
  final int index;
  final List<double> embedding;

  Embedding({
    required this.object,
    required this.index,
    required this.embedding,
  });

  factory Embedding.fromJson(Map<String, dynamic> json) => Embedding(
        object: json['object'],
        index: json['index'],
        // JSON may deliver whole numbers as int; normalize to double.
        embedding: [
          for (final v in json['embedding'] as List) (v as num).toDouble()
        ],
      );
}
|
||||
Reference in New Issue
Block a user