feat: customizable data storage path

This commit is contained in:
Per Stark
2025-05-09 23:28:36 +02:00
parent 89badb3bed
commit c49005c258
16 changed files with 261 additions and 40 deletions

View File

@@ -5,6 +5,7 @@ use common::{storage::db::SurrealDbClient, utils::config::AppConfig};
#[derive(Clone)]
pub struct ApiState {
pub db: Arc<SurrealDbClient>,
pub config: AppConfig,
}
impl ApiState {
@@ -24,6 +25,7 @@ impl ApiState {
let app_state = ApiState {
db: surreal_db_client.clone(),
config: config.clone(),
};
Ok(app_state)

View File

@@ -30,12 +30,9 @@ pub async fn ingest_data(
) -> Result<impl IntoResponse, ApiError> {
info!("Received input: {:?}", input);
let file_infos = try_join_all(
input
.files
.into_iter()
.map(|file| FileInfo::new(file, &state.db, &user.id).map_err(AppError::from)),
)
let file_infos = try_join_all(input.files.into_iter().map(|file| {
FileInfo::new(file, &state.db, &user.id, &state.config).map_err(AppError::from)
}))
.await?;
let payloads = IngestionPayload::create_ingestion_payload(

View File

@@ -11,7 +11,9 @@ use tokio::fs::remove_dir_all;
use tracing::info;
use uuid::Uuid;
use crate::{error::AppError, storage::db::SurrealDbClient, stored_object};
use crate::{
error::AppError, storage::db::SurrealDbClient, stored_object, utils::config::AppConfig,
};
#[derive(Error, Debug)]
pub enum FileError {
@@ -47,7 +49,9 @@ impl FileInfo {
field_data: FieldData<NamedTempFile>,
db_client: &SurrealDbClient,
user_id: &str,
config: &AppConfig,
) -> Result<Self, FileError> {
info!("Data_dir: {:?}", config);
let file = field_data.contents;
let file_name = field_data
.metadata
@@ -79,7 +83,7 @@ impl FileInfo {
updated_at: now,
file_name,
sha256,
path: Self::persist_file(&uuid, file, &sanitized_file_name, user_id)
path: Self::persist_file(&uuid, file, &sanitized_file_name, user_id, config)
.await?
.to_string_lossy()
.into(),
@@ -161,13 +165,14 @@ impl FileInfo {
}
}
/// Persists the file to the filesystem under `./data/{user_id}/{uuid}/{file_name}`.
/// Persists the file to the filesystem under `{data_dir}/{user_id}/{uuid}/{file_name}`.
///
/// # Arguments
/// * `uuid` - The UUID of the file.
/// * `file` - The temporary file to persist.
/// * `file_name` - The sanitized file name.
/// * `user_id` - User id
/// * `config` - Application configuration containing data directory path
///
/// # Returns
/// * `Result<PathBuf, FileError>` - The persisted file path or an error.
@@ -176,8 +181,18 @@ impl FileInfo {
file: NamedTempFile,
file_name: &str,
user_id: &str,
config: &AppConfig,
) -> Result<PathBuf, FileError> {
let base_dir = Path::new("./data");
info!("Data dir: {:?}", config.data_dir);
// Convert relative path to absolute path
let base_dir = if config.data_dir.starts_with('/') {
Path::new(&config.data_dir).to_path_buf()
} else {
std::env::current_dir()
.map_err(FileError::Io)?
.join(&config.data_dir)
};
let user_dir = base_dir.join(user_id); // Create the user directory
let uuid_dir = user_dir.join(uuid.to_string()); // Create the UUID directory under the user directory
@@ -190,9 +205,11 @@ impl FileInfo {
let final_path = uuid_dir.join(file_name);
info!("Final path: {:?}", final_path);
// Persist the temporary file to the final path
file.persist(&final_path)?;
info!("Persisted file to {:?}", final_path);
// Copy the temporary file to the final path
tokio::fs::copy(file.path(), &final_path)
.await
.map_err(FileError::Io)?;
info!("Copied file to {:?}", final_path);
Ok(final_path)
}
@@ -308,6 +325,116 @@ mod tests {
field_data
}
#[tokio::test]
async fn test_cross_filesystem_file_operations() {
    // Setup an isolated in-memory database for this test.
    let namespace = "test_ns";
    let database = &Uuid::new_v4().to_string();
    let db = SurrealDbClient::memory(namespace, database)
        .await
        .expect("Failed to start in-memory surrealdb");

    // Create a test file.
    let content = b"This is a test file for cross-filesystem operations";
    let file_name = "cross_fs_test.txt";
    let field_data = create_test_file(content, file_name);

    // Use a unique data_dir per test run. A fixed shared path such as
    // "/tmp/minne_test_data" would let parallel tests (cargo test runs
    // tests concurrently) clobber each other's files and race on the
    // final remove_dir_all cleanup.
    // /tmp is typically a different filesystem (tmpfs) from the build
    // dir, which is exactly what this test wants to exercise.
    let user_id = "test_user";
    let data_dir = format!("/tmp/minne_test_data_{}", Uuid::new_v4());
    let config = AppConfig {
        data_dir: data_dir.clone(),
        openai_api_key: "test_key".to_string(),
        surrealdb_address: "test_address".to_string(),
        surrealdb_username: "test_user".to_string(),
        surrealdb_password: "test_pass".to_string(),
        surrealdb_namespace: "test_ns".to_string(),
        surrealdb_database: "test_db".to_string(),
    };

    // Test file creation across filesystems.
    let file_info = FileInfo::new(field_data, &db, user_id, &config)
        .await
        .expect("Failed to create file across filesystems");

    // Verify the file exists and has the expected content.
    let file_path = Path::new(&file_info.path);
    assert!(file_path.exists(), "File should exist at {:?}", file_path);
    let file_content = tokio::fs::read_to_string(file_path)
        .await
        .expect("Failed to read file content");
    assert_eq!(file_content, String::from_utf8_lossy(content));

    // Test retrieval of the stored record.
    let retrieved = FileInfo::get_by_id(&file_info.id, &db)
        .await
        .expect("Failed to retrieve file info");
    assert_eq!(retrieved.id, file_info.id);
    assert_eq!(retrieved.sha256, file_info.sha256);

    // Test deletion removes both the record and the on-disk file.
    FileInfo::delete_by_id(&file_info.id, &db)
        .await
        .expect("Failed to delete file");
    assert!(!file_path.exists(), "File should be deleted");

    // Clean up the per-test directory (best effort).
    let _ = tokio::fs::remove_dir_all(&config.data_dir).await;
}
#[tokio::test]
async fn test_cross_filesystem_duplicate_detection() {
    // Setup an isolated in-memory database for this test.
    let namespace = "test_ns";
    let database = &Uuid::new_v4().to_string();
    let db = SurrealDbClient::memory(namespace, database)
        .await
        .expect("Failed to start in-memory surrealdb");

    // Create a test file.
    let content = b"This is a test file for cross-filesystem duplicate detection";
    let file_name = "cross_fs_duplicate.txt";
    let field_data = create_test_file(content, file_name);

    // Use a unique data_dir per test run. Sharing "/tmp/minne_test_data"
    // with test_cross_filesystem_file_operations makes parallel runs
    // race: each test's remove_dir_all cleanup can delete the other
    // test's files while it is still asserting on them.
    let user_id = "test_user";
    let data_dir = format!("/tmp/minne_test_data_{}", Uuid::new_v4());
    let config = AppConfig {
        data_dir: data_dir.clone(),
        openai_api_key: "test_key".to_string(),
        surrealdb_address: "test_address".to_string(),
        surrealdb_username: "test_user".to_string(),
        surrealdb_password: "test_pass".to_string(),
        surrealdb_namespace: "test_ns".to_string(),
        surrealdb_database: "test_db".to_string(),
    };

    // Store the original file.
    let original_file_info = FileInfo::new(field_data, &db, user_id, &config)
        .await
        .expect("Failed to create original file");

    // Submit another file with the same content but a different name.
    let duplicate_name = "cross_fs_duplicate_2.txt";
    let field_data2 = create_test_file(content, duplicate_name);

    // The system should detect the duplicate (same sha256) and hand back
    // the original FileInfo, keeping the original file name.
    let duplicate_file_info = FileInfo::new(field_data2, &db, user_id, &config)
        .await
        .expect("Failed to process duplicate file");

    // Verify duplicate detection worked.
    assert_eq!(duplicate_file_info.id, original_file_info.id);
    assert_eq!(duplicate_file_info.sha256, original_file_info.sha256);
    assert_eq!(duplicate_file_info.file_name, file_name);
    assert_ne!(duplicate_file_info.file_name, duplicate_name);

    // Clean up the database record and the per-test directory.
    FileInfo::delete_by_id(&original_file_info.id, &db)
        .await
        .expect("Failed to delete file");
    let _ = tokio::fs::remove_dir_all(&config.data_dir).await;
}
#[tokio::test]
async fn test_file_creation() {
// Setup in-memory database for testing
@@ -324,7 +451,16 @@ mod tests {
// Create a FileInfo instance
let user_id = "test_user";
let file_info = FileInfo::new(field_data, &db, user_id).await;
let config = AppConfig {
data_dir: "./data".to_string(),
openai_api_key: "test_key".to_string(),
surrealdb_address: "test_address".to_string(),
surrealdb_username: "test_user".to_string(),
surrealdb_password: "test_pass".to_string(),
surrealdb_namespace: "test_ns".to_string(),
surrealdb_database: "test_db".to_string(),
};
let file_info = FileInfo::new(field_data, &db, user_id, &config).await;
// We can't fully test persistence to disk in unit tests,
// but we can verify the database record was created
@@ -364,8 +500,18 @@ mod tests {
let file_name = "original.txt";
let user_id = "test_user";
let config = AppConfig {
data_dir: "./data".to_string(),
openai_api_key: "test_key".to_string(),
surrealdb_address: "test_address".to_string(),
surrealdb_username: "test_user".to_string(),
surrealdb_password: "test_pass".to_string(),
surrealdb_namespace: "test_ns".to_string(),
surrealdb_database: "test_db".to_string(),
};
let field_data1 = create_test_file(content, file_name);
let original_file_info = FileInfo::new(field_data1, &db, user_id)
let original_file_info = FileInfo::new(field_data1, &db, user_id, &config)
.await
.expect("Failed to create original file");
@@ -374,7 +520,7 @@ mod tests {
let field_data2 = create_test_file(content, duplicate_name);
// The system should detect it's the same file and return the original FileInfo
let duplicate_file_info = FileInfo::new(field_data2, &db, user_id)
let duplicate_file_info = FileInfo::new(field_data2, &db, user_id, &config)
.await
.expect("Failed to process duplicate file");
@@ -645,8 +791,6 @@ mod tests {
assert_eq!(retrieved_info.file_name, original_file_info.file_name);
assert_eq!(retrieved_info.path, original_file_info.path);
assert_eq!(retrieved_info.mime_type, original_file_info.mime_type);
// Optionally compare timestamps if precision isn't an issue
// assert_eq!(retrieved_info.created_at, original_file_info.created_at);
}
#[tokio::test]
@@ -674,4 +818,62 @@ mod tests {
Ok(_) => panic!("Expected an error, but got Ok"),
}
}
#[tokio::test]
async fn test_data_directory_configuration() {
    // Setup an isolated in-memory database for this test.
    let namespace = "test_ns";
    let database = &Uuid::new_v4().to_string();
    let db = SurrealDbClient::memory(namespace, database)
        .await
        .expect("Failed to start in-memory surrealdb");

    // Create a test file.
    let content = b"This is a test file for data directory configuration";
    let file_name = "data_dir_test.txt";
    let field_data = create_test_file(content, file_name);

    // Use a unique custom data directory per run. A fixed path like
    // "/tmp/minne_custom_data_dir" can collide when two instances of the
    // test suite run at once (e.g. CI and a local run on the same host),
    // and a stale dir from a crashed run would pollute the assertions.
    let user_id = "test_user";
    let custom_data_dir = format!("/tmp/minne_custom_data_dir_{}", Uuid::new_v4());
    let config = AppConfig {
        data_dir: custom_data_dir.clone(),
        openai_api_key: "test_key".to_string(),
        surrealdb_address: "test_address".to_string(),
        surrealdb_username: "test_user".to_string(),
        surrealdb_password: "test_pass".to_string(),
        surrealdb_namespace: "test_ns".to_string(),
        surrealdb_database: "test_db".to_string(),
    };

    // Test file creation under the configured directory.
    let file_info = FileInfo::new(field_data, &db, user_id, &config)
        .await
        .expect("Failed to create file in custom data directory");

    // Verify the file exists on disk.
    let file_path = Path::new(&file_info.path);
    assert!(file_path.exists(), "File should exist at {:?}", file_path);

    // Verify the file landed inside the custom data directory.
    assert!(
        file_path.starts_with(&custom_data_dir),
        "File should be stored in the custom data directory"
    );

    // Verify the file has the correct content.
    let file_content = tokio::fs::read_to_string(file_path)
        .await
        .expect("Failed to read file content");
    assert_eq!(file_content, String::from_utf8_lossy(content));

    // Test deletion removes both the record and the on-disk file.
    FileInfo::delete_by_id(&file_info.id, &db)
        .await
        .expect("Failed to delete file");
    assert!(!file_path.exists(), "File should be deleted");

    // Clean up the per-test directory (best effort).
    let _ = tokio::fs::remove_dir_all(&custom_data_dir).await;
}
}

View File

@@ -9,6 +9,12 @@ pub struct AppConfig {
pub surrealdb_password: String,
pub surrealdb_namespace: String,
pub surrealdb_database: String,
// #[serde(default = "default_data_dir")]
pub data_dir: String,
}
/// Fallback data directory ("./data") intended as the serde default for
/// `AppConfig::data_dir` via `#[serde(default = "default_data_dir")]`.
/// NOTE(review): that attribute is currently commented out above, so this
/// function is unused and `data_dir` is effectively a required config
/// field — confirm whether the default should be re-enabled.
fn default_data_dir() -> String {
"./data".to_string()
}
pub fn get_config() -> Result<AppConfig, ConfigError> {

File diff suppressed because one or more lines are too long

View File

@@ -1,6 +1,6 @@
use common::storage::db::SurrealDbClient;
use common::utils::template_engine::{ProvidesTemplateEngine, TemplateEngine};
use common::{create_template_engine, storage::db::ProvidesDb};
use common::{create_template_engine, storage::db::ProvidesDb, utils::config::AppConfig};
use std::sync::Arc;
use tracing::debug;
@@ -12,6 +12,7 @@ pub struct HtmlState {
pub openai_client: Arc<OpenAIClientType>,
pub templates: Arc<TemplateEngine>,
pub session_store: Arc<SessionStoreType>,
pub config: AppConfig,
}
impl HtmlState {
@@ -19,6 +20,7 @@ impl HtmlState {
db: Arc<SurrealDbClient>,
openai_client: Arc<OpenAIClientType>,
session_store: Arc<SessionStoreType>,
config: AppConfig,
) -> Result<Self, Box<dyn std::error::Error>> {
let template_engine = create_template_engine!("templates");
debug!("Template engine created for html_router.");
@@ -28,6 +30,7 @@ impl HtmlState {
openai_client,
session_store,
templates: Arc::new(template_engine),
config,
})
}
}

View File

@@ -39,7 +39,7 @@ pub async fn index_handler(
auth: AuthSessionType,
) -> Result<impl IntoResponse, HtmlError> {
let Some(user) = auth.current_user else {
return Ok(TemplateResponse::redirect("/"));
return Ok(TemplateResponse::redirect("/signin"));
};
let active_jobs = User::get_unfinished_ingestion_tasks(&user.id, &state.db).await?;

View File

@@ -14,6 +14,7 @@ use common::{
file_info::FileInfo, ingestion_payload::IngestionPayload, ingestion_task::IngestionTask,
user::User,
},
utils::config::AppConfig,
};
use crate::{
@@ -88,12 +89,9 @@ pub async fn process_ingress_form(
info!("{:?}", input);
let file_infos = try_join_all(
input
.files
.into_iter()
.map(|file| FileInfo::new(file, &state.db, &user.id).map_err(AppError::from)),
)
let file_infos = try_join_all(input.files.into_iter().map(|file| {
FileInfo::new(file, &state.db, &user.id, &state.config).map_err(AppError::from)
}))
.await?;
let payloads = IngestionPayload::create_ingestion_payload(

View File

@@ -1,6 +1,6 @@
<div class="columns-1 lg:columns-2 2xl:columns-3 gap-4" id="text_content_cards">
<div class="columns-1 md:columns-2 2xl:columns-3 gap-4" id="text_content_cards">
{% for text_content in text_contents %}
<div class="card cursor-pointer min-w-72 mb-4 bg-base-100 shadow break-inside-avoid-column"
<div class="card cursor-pointer mb-4 bg-base-100 shadow break-inside-avoid-column"
hx-get="/content/{{ text_content.id }}/read" hx-target="#modal" hx-swap="innerHTML">
{% if text_content.url_info %}
<figure>

View File

@@ -1,7 +1,7 @@
<nav class="bg-base-200 sticky top-0 z-10">
<div class="container mx-auto navbar">
<div class="flex-1">
<a class="text-2xl p-2 text-primary font-bold" href="/" hx-boost="true">Minne</a>
<a class="text-2xl text-primary font-bold" href="/" hx-boost="true">Minne</a>
</div>
<div class="flex-none">
<ul class="menu menu-horizontal px-2 items-center">

View File

@@ -13,7 +13,7 @@
<div class="drawer-side z-20">
<label for="my-drawer" aria-label="close sidebar" class="drawer-overlay"></label>
<ul class="menu p-0 w-64 h-full bg-base-200 text-base-content flex flex-col">
<ul class="menu p-0 w-72 h-full bg-base-200 text-base-content flex flex-col">
<!-- === TOP FIXED SECTION === -->
<div class="px-2 mt-14">

View File

@@ -16,7 +16,7 @@ use common::{
text_content::TextContent,
},
},
utils::embedding::generate_embedding,
utils::{config::AppConfig, embedding::generate_embedding},
};
use crate::{
@@ -27,14 +27,20 @@ use crate::{
pub struct IngestionPipeline {
db: Arc<SurrealDbClient>,
openai_client: Arc<async_openai::Client<async_openai::config::OpenAIConfig>>,
config: AppConfig,
}
impl IngestionPipeline {
pub async fn new(
db: Arc<SurrealDbClient>,
openai_client: Arc<async_openai::Client<async_openai::config::OpenAIConfig>>,
config: AppConfig,
) -> Result<Self, AppError> {
Ok(Self { db, openai_client })
Ok(Self {
db,
openai_client,
config,
})
}
pub async fn process_task(&self, task: IngestionTask) -> Result<(), AppError> {
let current_attempts = match task.status {
@@ -53,7 +59,7 @@ impl IngestionPipeline {
)
.await?;
let text_content = to_text_content(task.content, &self.db).await?;
let text_content = to_text_content(task.content, &self.db, &self.config).await?;
match self.process(&text_content).await {
Ok(_) => {

View File

@@ -14,6 +14,7 @@ use common::{
ingestion_payload::IngestionPayload,
text_content::{TextContent, UrlInfo},
},
utils::config::AppConfig,
};
use dom_smoothie::{Article, Readability, TextMode};
use headless_chrome::Browser;
@@ -24,6 +25,7 @@ use tracing::{error, info};
pub async fn to_text_content(
ingestion_payload: IngestionPayload,
db: &SurrealDbClient,
config: &AppConfig,
) -> Result<TextContent, AppError> {
match ingestion_payload {
IngestionPayload::Url {
@@ -32,7 +34,7 @@ pub async fn to_text_content(
category,
user_id,
} => {
let (article, file_info) = fetch_article_from_url(&url, db, &user_id).await?;
let (article, file_info) = fetch_article_from_url(&url, db, &user_id, &config).await?;
Ok(TextContent::new(
article.text_content.into(),
Some(context),
@@ -101,6 +103,7 @@ async fn fetch_article_from_url(
url: &str,
db: &SurrealDbClient,
user_id: &str,
config: &AppConfig,
) -> Result<(Article, FileInfo), AppError> {
info!("Fetching URL: {}", url);
// Instantiate timer
@@ -173,7 +176,7 @@ async fn fetch_article_from_url(
};
// Store screenshot
let file_info = FileInfo::new(field_data, db, user_id).await?;
let file_info = FileInfo::new(field_data, db, user_id, &config).await?;
// Parse content...
let config = dom_smoothie::Config {

View File

@@ -41,10 +41,12 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
async_openai::config::OpenAIConfig::new().with_api_key(&config.openai_api_key),
));
let html_state = HtmlState::new_with_resources(db, openai_client, session_store)?;
let html_state =
HtmlState::new_with_resources(db, openai_client, session_store, config.clone())?;
let api_state = ApiState {
db: html_state.db.clone(),
config: config.clone(),
};
// Create Axum router
@@ -93,7 +95,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
// Initialize worker components
let openai_client = Arc::new(async_openai::Client::new());
let ingestion_pipeline = Arc::new(
IngestionPipeline::new(worker_db.clone(), openai_client.clone())
IngestionPipeline::new(worker_db.clone(), openai_client.clone(), config.clone())
.await
.unwrap(),
);

View File

@@ -39,10 +39,12 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
async_openai::config::OpenAIConfig::new().with_api_key(&config.openai_api_key),
));
let html_state = HtmlState::new_with_resources(db, openai_client, session_store)?;
let html_state =
HtmlState::new_with_resources(db, openai_client, session_store, config.clone())?;
let api_state = ApiState {
db: html_state.db.clone(),
config: config.clone(),
};
// Create Axum router

View File

@@ -29,7 +29,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
let openai_client = Arc::new(async_openai::Client::new());
let ingestion_pipeline =
Arc::new(IngestionPipeline::new(db.clone(), openai_client.clone()).await?);
Arc::new(IngestionPipeline::new(db.clone(), openai_client.clone(), config).await?);
run_worker_loop(db, ingestion_pipeline).await
}