refactor: add openai client and improve reference handling

This commit is contained in:
Per Stark
2024-12-10 17:38:06 +01:00
parent cd7604c0ef
commit 1c4b3284bf
11 changed files with 121 additions and 88 deletions

View File

@@ -57,7 +57,7 @@ impl<'a> IngressAnalyzer<'a> {
text, category, instructions
);
-        combined_knowledge_entity_retrieval(self.db_client, self.openai_client, input_text).await
+        combined_knowledge_entity_retrieval(self.db_client, self.openai_client, &input_text).await
}
fn prepare_llm_request(

View File

@@ -158,7 +158,7 @@ async fn create_single_entity(
llm_entity.name, llm_entity.description, llm_entity.entity_type
);
-    let embedding = generate_embedding(openai_client, embedding_input).await?;
+    let embedding = generate_embedding(openai_client, &embedding_input).await?;
Ok(KnowledgeEntity {
id: assigned_id,

View File

@@ -101,7 +101,7 @@ impl ContentProcessor {
// Could potentially process chunks in parallel with a bounded concurrent limit
for chunk in chunks {
-            let embedding = generate_embedding(&self.openai_client, chunk.to_string()).await?;
+            let embedding = generate_embedding(&self.openai_client, chunk).await?;
let text_chunk = TextChunk::new(content.id.to_string(), chunk.to_string(), embedding);
store_item(&self.db_client, text_chunk).await?;
}