refactoring (WIP): extract a `storage` module and add a `stored_entity!` macro for SurrealDB-backed entities

This commit is contained in:
Per Stark
2024-11-20 16:09:35 +01:00
parent ec347d524c
commit 7222223c31
5 changed files with 100 additions and 14 deletions

View File

@@ -1,5 +1,6 @@
pub mod models;
pub mod rabbitmq;
pub mod routes;
pub mod storage;
pub mod surrealdb;
pub mod utils;

View File

@@ -1,18 +1,28 @@
use axum::{
extract::DefaultBodyLimit, routing::{delete, get, post, put}, Extension, Router
extract::DefaultBodyLimit,
routing::{delete, get, post, put},
Extension, Router,
};
use tracing_subscriber::{fmt, prelude::*, EnvFilter};
use zettle_db::{rabbitmq::{publisher::RabbitMQProducer, RabbitMQConfig}, routes::{file::{delete_file_handler, get_file_handler, update_file_handler, upload_handler}, ingress::ingress_handler, queue_length::queue_length_handler}, surrealdb::SurrealDbClient};
use std::sync::Arc;
use tracing_subscriber::{fmt, prelude::*, EnvFilter};
use zettle_db::{
rabbitmq::{publisher::RabbitMQProducer, RabbitMQConfig},
routes::{
file::{delete_file_handler, get_file_handler, update_file_handler, upload_handler},
ingress::ingress_handler,
queue_length::queue_length_handler,
},
surrealdb::SurrealDbClient,
};
#[tokio::main(flavor = "multi_thread", worker_threads = 2)]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
// Set up tracing
tracing_subscriber::registry()
.with(fmt::layer())
.with(EnvFilter::from_default_env())
.try_init()
.ok();
.with(fmt::layer())
.with(EnvFilter::from_default_env())
.try_init()
.ok();
// Set up RabbitMQ
let config = RabbitMQConfig {
@@ -21,14 +31,13 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
queue: "my_queue".to_string(),
routing_key: "my_key".to_string(),
};
// Set up producer
let producer = Arc::new(RabbitMQProducer::new(&config).await?);
// Set up database client
let db_client = Arc::new(SurrealDbClient::new().await?);
// Create Axum router
let app = Router::new()
.route("/ingress", post(ingress_handler))
@@ -36,16 +45,14 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
.layer(Extension(producer))
.route("/file", post(upload_handler))
.layer(DefaultBodyLimit::max(1024 * 1024 * 1024))
.route("/file/:uuid", get(get_file_handler))
.route("/file/:uuid", put(update_file_handler))
.route("/file/:uuid", get(get_file_handler))
.route("/file/:uuid", put(update_file_handler))
.route("/file/:uuid", delete(delete_file_handler))
.layer(Extension(db_client));
tracing::info!("Listening on 0.0.0.0:3000");
let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await?;
axum::serve(listener, app).await?;
Ok(())
}

1
src/storage/mod.rs Normal file
View File

@@ -0,0 +1 @@
pub mod types;

42
src/storage/types/mod.rs Normal file
View File

@@ -0,0 +1,42 @@
pub mod text_content;
#[macro_export]
macro_rules! stored_entity {
($name:ident, $table:expr, {$($field:ident: $ty:ty),*}) => {
use axum::async_trait;
use serde::{Deserialize, Deserializer, Serialize};
use surrealdb::sql::Thing;
fn thing_to_string<'de, D>(deserializer: D) -> Result<String, D::Error>
where
D: Deserializer<'de>,
{
let thing = Thing::deserialize(deserializer)?;
Ok(thing.id.to_raw())
}
#[async_trait]
pub trait StoredEntity: Serialize + for<'de> Deserialize<'de> {
fn table_name() -> &'static str;
fn get_id(&self) -> &str;
}
#[derive(Debug, Serialize, Deserialize)]
pub struct $name {
#[serde(deserialize_with = "thing_to_string")]
pub id: String,
$(pub $field: $ty),*
}
#[async_trait]
impl StoredEntity for $name {
fn table_name() -> &'static str {
$table
}
fn get_id(&self) -> &str {
&self.id
}
}
};
}

View File

@@ -0,0 +1,35 @@
use uuid::Uuid;
use crate::models::file_info::FileInfo;
use crate::stored_entity;
stored_entity!(TextContent, "text_content", {
text: String,
file_info: Option<FileInfo>,
instructions: String,
category: String
});
impl TextContent {
    /// Builds a `TextContent` with a freshly generated UUIDv4 id and no
    /// attached file info.
    pub fn new(text: String, instructions: String, category: String) -> Self {
        let id = Uuid::new_v4().to_string();
        Self {
            id,
            text,
            instructions,
            category,
            // No file has been associated yet at construction time.
            file_info: None,
        }
    }

    // Other methods...
}
/// Ad-hoc smoke check: constructs a `TextContent` and reads back its id.
/// NOTE(review): this is not annotated with `#[test]`, so it never runs
/// under `cargo test` — confirm whether that is intentional.
fn test() {
    let text = "hiho".to_string();
    let instructions = "instructions".to_string();
    let category = "cat".to_string();
    let content = TextContent::new(text, instructions, category);
    let _ = content.get_id();
}