wip: query html

This commit is contained in:
Per Stark
2024-12-10 16:46:04 +01:00
parent 4803632f0a
commit 15edc8aa6b
8 changed files with 173 additions and 7 deletions

View File

@@ -8,7 +8,7 @@ use crate::{error::ApiError, server::AppState};
pub async fn index_handler(State(state): State<AppState>) -> Result<Html<String>, ApiError> {
info!("Displaying index page");
let queue_length = state.rabbitmq_consumer.queue.message_count();
let queue_length = state.rabbitmq_consumer.get_queue_length().await?;
let output = state
.tera

View File

@@ -3,3 +3,4 @@ pub mod index;
pub mod ingress;
pub mod query;
pub mod queue_length;
pub mod search_result;

View File

@@ -15,16 +15,16 @@ pub struct QueryInput {
}
#[derive(Debug, Deserialize)]
struct Reference {
pub struct Reference {
#[allow(dead_code)]
reference: String,
pub reference: String,
}
#[derive(Debug, Deserialize)]
pub struct LLMResponseFormat {
answer: String,
pub answer: String,
#[allow(dead_code)]
references: Vec<Reference>,
pub references: Vec<Reference>,
}
pub async fn query_handler(

View File

@@ -0,0 +1,72 @@
use axum::{
extract::{Query, State},
response::Html,
};
use serde::Deserialize;
use serde_json::json;
use tera::Context;
use tracing::info;
use crate::{
error::ApiError,
retrieval::combined_knowledge_entity_retrieval,
server::{
routes::query::helper::{
create_chat_request, create_user_message, format_entities_json, process_llm_response,
},
AppState,
},
};
/// Query-string parameters accepted by [`search_result_handler`]
/// (e.g. `GET /search?query=...`).
///
/// `Debug` is derived for consistency with the other request/response
/// types in this commit (`Reference`, `LLMResponseFormat`).
#[derive(Debug, Deserialize)]
pub struct SearchParams {
    /// Free-text search query supplied by the user.
    query: String,
}
/// Renders the HTML search-results fragment for a user query.
///
/// Flow: retrieve matching knowledge entities, build an LLM chat request
/// from them, send it to OpenAI, then render the parsed answer and its
/// references through the `search_result.html` Tera template.
///
/// # Errors
///
/// Returns [`ApiError::QueryError`] when the OpenAI call, template-context
/// construction, or template rendering fails; errors from entity retrieval,
/// request creation, and LLM-response processing are propagated via `?`.
pub async fn search_result_handler(
    State(state): State<AppState>,
    Query(query): Query<SearchParams>,
) -> Result<Html<String>, ApiError> {
    info!("Displaying search results");
    let openai_client = async_openai::Client::new();

    // Retrieve entities relevant to the query.
    // NOTE(review): `combined_knowledge_entity_retrieval` appears to take an
    // owned String, hence the clone; confirm whether it could accept &str.
    let entities = combined_knowledge_entity_retrieval(
        &state.surreal_db_client,
        &openai_client,
        query.query.clone(),
    )
    .await?;

    // Format entities and build the user message for the LLM.
    let entities_json = format_entities_json(&entities);
    let user_message = create_user_message(&entities_json, &query.query);

    // Create and send the chat-completion request.
    let request = create_chat_request(user_message)?;
    let response = openai_client
        .chat()
        .create(request)
        .await
        .map_err(|e| ApiError::QueryError(e.to_string()))?;

    // Parse the structured LLM response into an answer plus references.
    let answer = process_llm_response(response).await?;
    let references: Vec<String> = answer
        .references
        .into_iter()
        .map(|reference| reference.reference)
        .collect();

    // Fix: the original `.unwrap()`ed both context construction and template
    // rendering, panicking the whole handler (and taking down the request)
    // on any template error. Surface both as ApiError::QueryError instead,
    // matching the error mapping used for the OpenAI call above.
    let context =
        Context::from_value(json!({ "result": answer.answer, "references": references }))
            .map_err(|e| ApiError::QueryError(e.to_string()))?;
    let output = state
        .tera
        .render("search_result.html", &context)
        .map_err(|e| ApiError::QueryError(e.to_string()))?;

    Ok(output.into())
}