feat: add SurrealDB-backed job queue and remove lapin/RabbitMQ

Author: Per Stark
Date: 2025-01-09 21:13:42 +01:00
parent a87cb82b75
commit 0f8a83429a
28 changed files with 622 additions and 1306 deletions

View File

@@ -15,12 +15,15 @@ use tower_http::services::ServeDir;
use tracing::info;
use tracing_subscriber::{fmt, prelude::*, EnvFilter};
use zettle_db::{
rabbitmq::{consumer::RabbitMQConsumer, publisher::RabbitMQProducer, RabbitMQConfig},
ingress::jobqueue::JobQueue,
server::{
middleware_api_auth::api_auth,
routes::{
api::{
ingress::ingress_data, query::query_handler, queue_length::queue_length_handler,
ingress::ingress_data,
ingress_task::{delete_queue_task, get_queue_tasks},
query::query_handler,
queue_length::queue_length_handler,
},
html::{
account::{delete_account, set_api_key, show_account_page},
@@ -28,6 +31,7 @@ use zettle_db::{
gdpr::{accept_gdpr, deny_gdpr},
index::index_handler,
ingress::{process_ingress_form, show_ingress_form},
ingress_tasks::{delete_task, show_queue_tasks},
search_result::search_result_handler,
signin::{authenticate_user, show_signin_form},
signout::sign_out_user,
@@ -53,14 +57,6 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
info!("{:?}", config);
// Set up RabbitMQ
let rabbitmq_config = RabbitMQConfig {
amqp_addr: config.rabbitmq_address,
exchange: config.rabbitmq_exchange,
queue: config.rabbitmq_queue,
routing_key: config.rabbitmq_routing_key,
};
let reloader = AutoReloader::new(move |notifier| {
let template_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("templates");
let mut env = Environment::new();
@@ -71,19 +67,19 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
Ok(env)
});
let surreal_db_client = Arc::new(
SurrealDbClient::new(
&config.surrealdb_address,
&config.surrealdb_username,
&config.surrealdb_password,
&config.surrealdb_namespace,
&config.surrealdb_database,
)
.await?,
);
let app_state = AppState {
rabbitmq_producer: Arc::new(RabbitMQProducer::new(&rabbitmq_config).await?),
rabbitmq_consumer: Arc::new(RabbitMQConsumer::new(&rabbitmq_config, false).await?),
surreal_db_client: Arc::new(
SurrealDbClient::new(
&config.surrealdb_address,
&config.surrealdb_username,
&config.surrealdb_password,
&config.surrealdb_namespace,
&config.surrealdb_database,
)
.await?,
),
surreal_db_client: surreal_db_client.clone(),
openai_client: Arc::new(async_openai::Client::new()),
templates: Arc::new(reloader),
mailer: Arc::new(Mailer::new(
@@ -91,6 +87,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
config.smtp_relayer,
config.smtp_password,
)?),
job_queue: Arc::new(JobQueue::new(surreal_db_client)),
};
// setup_auth(&app_state.surreal_db_client).await?;
@@ -134,6 +131,8 @@ fn api_routes_v1(app_state: &AppState) -> Router<AppState> {
// Ingress routes
.route("/ingress", post(ingress_data))
.route("/message_count", get(queue_length_handler))
.route("/queue", get(get_queue_tasks))
.route("/queue/:delivery_tag", delete(delete_queue_task))
.layer(DefaultBodyLimit::max(1024 * 1024 * 1024))
// Query routes
.route("/query", post(query_handler))
@@ -158,6 +157,8 @@ fn html_routes(
"/ingress",
get(show_ingress_form).post(process_ingress_form),
)
.route("/queue", get(show_queue_tasks))
.route("/queue/:delivery_tag", post(delete_task))
.route("/account", get(show_account_page))
.route("/set-api-key", post(set_api_key))
.route("/delete-account", delete(delete_account))

View File

@@ -1,8 +1,11 @@
use std::sync::Arc;
use futures::StreamExt;
use tracing::{error, info};
use tracing_subscriber::{fmt, prelude::*, EnvFilter};
use zettle_db::{
ingress::content_processor::ContentProcessor,
rabbitmq::{consumer::RabbitMQConsumer, RabbitMQConfig, RabbitMQError},
ingress::{content_processor::ContentProcessor, jobqueue::JobQueue},
storage::db::SurrealDbClient,
utils::config::get_config,
};
@@ -15,49 +18,56 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
.try_init()
.ok();
info!("Starting RabbitMQ consumer");
let config = get_config()?;
// Set up RabbitMQ config
let rabbitmq_config = RabbitMQConfig {
amqp_addr: config.rabbitmq_address.clone(),
exchange: config.rabbitmq_exchange.clone(),
queue: config.rabbitmq_queue.clone(),
routing_key: config.rabbitmq_routing_key.clone(),
};
let job_queue = JobQueue::new(Arc::new(
SurrealDbClient::new(
&config.surrealdb_address,
&config.surrealdb_username,
&config.surrealdb_password,
&config.surrealdb_namespace,
&config.surrealdb_database,
)
.await?,
));
// Create a RabbitMQ consumer
let consumer = RabbitMQConsumer::new(&rabbitmq_config, true).await?;
let content_processor = ContentProcessor::new(&config).await?;
// Start consuming messages
loop {
match consumer.consume().await {
Ok((ingress, delivery)) => {
info!("Received IngressObject: {:?}", ingress);
// Get the TextContent
let text_content = ingress.to_text_content().await?;
// First, check for any unfinished jobs
let unfinished_jobs = job_queue.get_unfinished_jobs().await?;
// Initialize ContentProcessor which handles LLM analysis and storage
let content_processor = ContentProcessor::new(&config).await?;
if !unfinished_jobs.is_empty() {
info!("Found {} unfinished jobs", unfinished_jobs.len());
// Begin processing of TextContent
content_processor.process(&text_content).await?;
// Remove from queue
consumer.ack_delivery(delivery).await?;
}
Err(RabbitMQError::ConsumeError(e)) => {
error!("Error consuming message: {}", e);
// Optionally add a delay before trying again
tokio::time::sleep(tokio::time::Duration::from_secs(1)).await;
}
Err(e) => {
error!("Unexpected error: {}", e);
break;
for job in unfinished_jobs {
if let Err(e) = job_queue.process_job(job.clone(), &content_processor).await {
error!("Error processing job {}: {}", job.id, e);
}
}
}
}
Ok(())
// If no unfinished jobs, start listening for new ones
info!("Listening for new jobs...");
let mut job_stream = job_queue.listen_for_jobs().await?;
while let Some(notification) = job_stream.next().await {
match notification {
Ok(notification) => {
info!("Received new job: {}", notification.data.id);
if let Err(e) = job_queue
.process_job(notification.data, &content_processor)
.await
{
error!("Error processing job: {}", e);
}
}
Err(e) => error!("Error in job notification: {}", e),
}
}
// If we reach here, the stream has ended (connection lost?)
error!("Job stream ended unexpectedly, reconnecting...");
tokio::time::sleep(tokio::time::Duration::from_secs(5)).await;
}
}