passed wide smoke check

This commit is contained in:
Per Stark
2025-12-10 13:54:08 +01:00
parent 2e2ea0c4ff
commit a5bc72aedf
12 changed files with 403 additions and 235 deletions

View File

@@ -97,26 +97,6 @@ impl SurrealDbClient {
Ok(())
}
/// Operation to rebuild indexes
pub async fn rebuild_indexes(&self) -> Result<(), AppError> {
debug!("Rebuilding indexes");
let rebuild_sql = r#"
REBUILD INDEX IF EXISTS text_content_fts_idx ON text_content;
REBUILD INDEX IF EXISTS knowledge_entity_fts_name_idx ON knowledge_entity;
REBUILD INDEX IF EXISTS knowledge_entity_fts_description_idx ON knowledge_entity;
REBUILD INDEX IF EXISTS text_chunk_fts_chunk_idx ON text_chunk;
REBUILD INDEX IF EXISTS idx_embedding_text_chunk_embedding ON text_chunk_embedding;
REBUILD INDEX IF EXISTS idx_embedding_knowledge_entity_embedding ON knowledge_entity_embedding;
"#;
self.client
.query(rebuild_sql)
.await
.map_err(|e| AppError::InternalError(e.to_string()))?;
Ok(())
}
/// Operation to store an object in SurrealDB; requires the struct to implement StoredObject
///
/// # Arguments

View File

@@ -1,19 +1,9 @@
#![allow(
clippy::missing_docs_in_private_items,
clippy::module_name_repetitions,
clippy::items_after_statements,
clippy::arithmetic_side_effects,
clippy::cast_precision_loss,
clippy::redundant_closure_for_method_calls,
clippy::single_match_else,
clippy::uninlined_format_args
)]
use std::time::Duration;
use anyhow::{Context, Result};
use futures::future::try_join_all;
use serde::Deserialize;
use serde_json::Value;
use serde_json::{Map, Value};
use tracing::{debug, info, warn};
use crate::{error::AppError, storage::db::SurrealDbClient};
@@ -28,6 +18,82 @@ struct HnswIndexSpec {
options: &'static str,
}
const fn hnsw_index_specs() -> [HnswIndexSpec; 2] {
[
HnswIndexSpec {
index_name: "idx_embedding_text_chunk_embedding",
table: "text_chunk_embedding",
options: "DIST COSINE TYPE F32 EFC 100 M 8 CONCURRENTLY",
},
HnswIndexSpec {
index_name: "idx_embedding_knowledge_entity_embedding",
table: "knowledge_entity_embedding",
options: "DIST COSINE TYPE F32 EFC 100 M 8 CONCURRENTLY",
},
]
}
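Each spec above is rendered into a DEFINE INDEX statement by definition_if_not_exists (further down). Roughly, the first spec produces a statement of this shape; the indexed field name is supplied by the renderer and is not visible in this hunk, so it appears as a placeholder:
DEFINE INDEX IF NOT EXISTS idx_embedding_text_chunk_embedding ON TABLE text_chunk_embedding FIELDS <embedding_field> HNSW DIMENSION <dimension> DIST COSINE TYPE F32 EFC 100 M 8 CONCURRENTLY;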
const fn fts_index_specs() -> [FtsIndexSpec; 8] {
[
FtsIndexSpec {
index_name: "text_content_fts_idx",
table: "text_content",
field: "text",
analyzer: Some(FTS_ANALYZER_NAME),
method: "BM25",
},
FtsIndexSpec {
index_name: "text_content_context_fts_idx",
table: "text_content",
field: "context",
analyzer: Some(FTS_ANALYZER_NAME),
method: "BM25",
},
FtsIndexSpec {
index_name: "text_content_file_name_fts_idx",
table: "text_content",
field: "file_info.file_name",
analyzer: Some(FTS_ANALYZER_NAME),
method: "BM25",
},
FtsIndexSpec {
index_name: "text_content_url_fts_idx",
table: "text_content",
field: "url_info.url",
analyzer: Some(FTS_ANALYZER_NAME),
method: "BM25",
},
FtsIndexSpec {
index_name: "text_content_url_title_fts_idx",
table: "text_content",
field: "url_info.title",
analyzer: Some(FTS_ANALYZER_NAME),
method: "BM25",
},
FtsIndexSpec {
index_name: "knowledge_entity_fts_name_idx",
table: "knowledge_entity",
field: "name",
analyzer: Some(FTS_ANALYZER_NAME),
method: "BM25",
},
FtsIndexSpec {
index_name: "knowledge_entity_fts_description_idx",
table: "knowledge_entity",
field: "description",
analyzer: Some(FTS_ANALYZER_NAME),
method: "BM25",
},
FtsIndexSpec {
index_name: "text_chunk_fts_chunk_idx",
table: "text_chunk",
field: "chunk",
analyzer: Some(FTS_ANALYZER_NAME),
method: "BM25",
},
]
}
impl HnswIndexSpec {
fn definition_if_not_exists(&self, dimension: usize) -> String {
format!(
@@ -75,6 +141,20 @@ impl FtsIndexSpec {
field = self.field,
)
}
fn overwrite_definition(&self) -> String {
let analyzer_clause = self
.analyzer
.map(|analyzer| format!(" SEARCH ANALYZER {analyzer} {}", self.method))
.unwrap_or_default();
format!(
"DEFINE INDEX OVERWRITE {index} ON TABLE {table} FIELDS {field}{analyzer_clause} CONCURRENTLY;",
index = self.index_name,
table = self.table,
field = self.field,
)
}
}
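For reference, overwrite_definition renders the first FTS spec into a statement of this shape; the placeholder stands in for the value of FTS_ANALYZER_NAME, which is defined elsewhere in this file:
DEFINE INDEX OVERWRITE text_content_fts_idx ON TABLE text_content FIELDS text SEARCH ANALYZER <FTS_ANALYZER_NAME> BM25 CONCURRENTLY;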
/// Build runtime Surreal indexes (FTS + HNSW) using concurrent creation with readiness polling.
@@ -88,6 +168,13 @@ pub async fn ensure_runtime_indexes(
.map_err(|err| AppError::InternalError(err.to_string()))
}
/// Rebuild known FTS and HNSW indexes, skipping any that are not yet defined.
pub async fn rebuild_indexes(db: &SurrealDbClient) -> Result<(), AppError> {
rebuild_indexes_inner(db)
.await
.map_err(|err| AppError::InternalError(err.to_string()))
}
async fn ensure_runtime_indexes_inner(
db: &SurrealDbClient,
embedding_dimension: usize,
@@ -147,32 +234,68 @@ async fn ensure_runtime_indexes_inner(
Ok(())
}
async fn hnsw_index_state(
async fn rebuild_indexes_inner(db: &SurrealDbClient) -> Result<()> {
debug!("Rebuilding indexes with concurrent definitions");
create_fts_analyzer(db).await?;
for spec in fts_index_specs() {
if !index_exists(db, spec.table, spec.index_name).await? {
debug!(
index = spec.index_name,
table = spec.table,
"Skipping FTS rebuild because index is missing"
);
continue;
}
create_index_with_polling(
db,
spec.overwrite_definition(),
spec.index_name,
spec.table,
Some(spec.table),
)
.await?;
}
let hnsw_tasks = hnsw_index_specs().into_iter().map(|spec| async move {
if !index_exists(db, spec.table, spec.index_name).await? {
debug!(
index = spec.index_name,
table = spec.table,
"Skipping HNSW rebuild because index is missing"
);
return Ok(());
}
let Some(dimension) = existing_hnsw_dimension(db, &spec).await? else {
warn!(
index = spec.index_name,
table = spec.table,
"HNSW index missing dimension; skipping rebuild"
);
return Ok(());
};
create_index_with_polling(
db,
spec.definition_overwrite(dimension),
spec.index_name,
spec.table,
Some(spec.table),
)
.await
});
try_join_all(hnsw_tasks).await.map(|_| ())
}
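Note the asymmetry: FTS indexes are rebuilt sequentially, while the two HNSW rebuilds are awaited together via try_join_all, and the HNSW overwrite reuses the dimension read back from the existing definition rather than a configured value. A minimal caller sketch, assuming a connected SurrealDbClient named db:
// Rebuild whatever FTS/HNSW indexes are already defined; missing ones are skipped.
rebuild_indexes(&db).await?;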
async fn existing_hnsw_dimension(
db: &SurrealDbClient,
spec: &HnswIndexSpec,
expected_dimension: usize,
) -> Result<HnswIndexState> {
let info_query = format!("INFO FOR TABLE {table};", table = spec.table);
let mut response = db
.client
.query(info_query)
.await
.with_context(|| format!("fetching table info for {}", spec.table))?;
let info: surrealdb::Value = response
.take(0)
.context("failed to take table info response")?;
let info_json: Value =
serde_json::to_value(info).context("serializing table info to JSON for parsing")?;
let Some(indexes) = info_json
.get("Object")
.and_then(|o| o.get("indexes"))
.and_then(|i| i.get("Object"))
.and_then(|i| i.as_object())
else {
return Ok(HnswIndexState::Missing);
) -> Result<Option<usize>> {
let Some(indexes) = table_index_definitions(db, spec.table).await? else {
return Ok(None);
};
let Some(definition) = indexes
@@ -180,17 +303,23 @@ async fn hnsw_index_state(
.and_then(|details| details.get("Strand"))
.and_then(|v| v.as_str())
else {
return Ok(HnswIndexState::Missing);
return Ok(None);
};
let Some(current_dimension) = extract_dimension(definition) else {
return Ok(HnswIndexState::Missing);
};
Ok(extract_dimension(definition).and_then(|d| usize::try_from(d).ok()))
}
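extract_dimension is defined outside this hunk; a plausible sketch, assuming the stored definition embeds the dimension as DIMENSION <n> per SurrealDB's HNSW syntax:
// Hypothetical sketch: pull the number following "DIMENSION" out of a
// "DEFINE INDEX ... HNSW DIMENSION 384 ..." definition string.
fn extract_dimension(definition: &str) -> Option<u64> {
    let mut tokens = definition.split_whitespace();
    while let Some(token) = tokens.next() {
        if token.eq_ignore_ascii_case("DIMENSION") {
            return tokens.next()?.trim_end_matches(';').parse().ok();
        }
    }
    None
}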
if current_dimension == expected_dimension as u64 {
Ok(HnswIndexState::Matches)
} else {
Ok(HnswIndexState::Different(current_dimension))
async fn hnsw_index_state(
db: &SurrealDbClient,
spec: &HnswIndexSpec,
expected_dimension: usize,
) -> Result<HnswIndexState> {
match existing_hnsw_dimension(db, spec).await? {
None => Ok(HnswIndexState::Missing),
Some(current_dimension) if current_dimension == expected_dimension => {
Ok(HnswIndexState::Matches)
}
Some(current_dimension) => Ok(HnswIndexState::Different(current_dimension as u64)),
}
}
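HnswIndexState itself is defined elsewhere in the file; inferred from its usage here, it has this shape:
// Inferred from usage in this diff; the authoritative definition is not in this hunk.
enum HnswIndexState {
    Missing,
    Matches,
    Different(u64),
}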
@@ -492,7 +621,10 @@ async fn count_table_rows(db: &SurrealDbClient, table: &str) -> Result<u64> {
Ok(rows.first().map_or(0, |r| r.count))
}
async fn index_exists(db: &SurrealDbClient, table: &str, index_name: &str) -> Result<bool> {
async fn table_index_definitions(
db: &SurrealDbClient,
table: &str,
) -> Result<Option<Map<String, Value>>> {
let info_query = format!("INFO FOR TABLE {table};");
let mut response = db
.client
@@ -507,94 +639,22 @@ async fn index_exists(db: &SurrealDbClient, table: &str, index_name: &str) -> Re
let info_json: Value =
serde_json::to_value(info).context("serializing table info to JSON for parsing")?;
let Some(indexes) = info_json
Ok(info_json
.get("Object")
.and_then(|o| o.get("indexes"))
.and_then(|i| i.get("Object"))
.and_then(|i| i.as_object())
else {
.cloned())
}
async fn index_exists(db: &SurrealDbClient, table: &str, index_name: &str) -> Result<bool> {
let Some(indexes) = table_index_definitions(db, table).await? else {
return Ok(false);
};
Ok(indexes.contains_key(index_name))
}
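The Object/indexes/Object navigation follows the JSON that serde_json produces for a surrealdb::Value: each node is tagged with its variant name. For a table with one index, the serialized INFO FOR TABLE response walked above looks roughly like this (index name and definition are illustrative):
{
  "Object": {
    "indexes": {
      "Object": {
        "text_chunk_fts_idx": { "Strand": "DEFINE INDEX text_chunk_fts_idx ..." }
      }
    }
  }
}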
const fn hnsw_index_specs() -> [HnswIndexSpec; 2] {
[
HnswIndexSpec {
index_name: "idx_embedding_text_chunk_embedding",
table: "text_chunk_embedding",
options: "DIST COSINE TYPE F32 EFC 100 M 8 CONCURRENTLY",
},
HnswIndexSpec {
index_name: "idx_embedding_knowledge_entity_embedding",
table: "knowledge_entity_embedding",
options: "DIST COSINE TYPE F32 EFC 100 M 8 CONCURRENTLY",
},
]
}
const fn fts_index_specs() -> [FtsIndexSpec; 8] {
[
FtsIndexSpec {
index_name: "text_content_fts_idx",
table: "text_content",
field: "text",
analyzer: Some(FTS_ANALYZER_NAME),
method: "BM25",
},
FtsIndexSpec {
index_name: "text_content_context_fts_idx",
table: "text_content",
field: "context",
analyzer: Some(FTS_ANALYZER_NAME),
method: "BM25",
},
FtsIndexSpec {
index_name: "text_content_file_name_fts_idx",
table: "text_content",
field: "file_info.file_name",
analyzer: Some(FTS_ANALYZER_NAME),
method: "BM25",
},
FtsIndexSpec {
index_name: "text_content_url_fts_idx",
table: "text_content",
field: "url_info.url",
analyzer: Some(FTS_ANALYZER_NAME),
method: "BM25",
},
FtsIndexSpec {
index_name: "text_content_url_title_fts_idx",
table: "text_content",
field: "url_info.title",
analyzer: Some(FTS_ANALYZER_NAME),
method: "BM25",
},
FtsIndexSpec {
index_name: "knowledge_entity_fts_name_idx",
table: "knowledge_entity",
field: "name",
analyzer: Some(FTS_ANALYZER_NAME),
method: "BM25",
},
FtsIndexSpec {
index_name: "knowledge_entity_fts_description_idx",
table: "knowledge_entity",
field: "description",
analyzer: Some(FTS_ANALYZER_NAME),
method: "BM25",
},
FtsIndexSpec {
index_name: "text_chunk_fts_chunk_idx",
table: "text_chunk",
field: "chunk",
analyzer: Some(FTS_ANALYZER_NAME),
method: "BM25",
},
]
}
#[cfg(test)]
mod tests {
use super::*;

View File

@@ -17,8 +17,11 @@ pub type DynStore = Arc<dyn ObjectStore>;
/// Storage manager with persistent state and proper lifecycle management.
#[derive(Clone)]
pub struct StorageManager {
// Object store backend, wrapped as a trait object
store: DynStore,
// Which storage backend is in use
backend_kind: StorageKind,
// Base path on disk when using the local backend
local_base: Option<PathBuf>,
}
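A hypothetical construction sketch for the local backend; StorageKind::Local and the LocalFileSystem call are assumptions here, since the real constructors are outside this hunk:
// Hypothetical: wire up a local-disk store. StorageKind::Local and
// LocalFileSystem::new_with_prefix are assumptions, not code from this commit.
fn local_storage(base: PathBuf) -> anyhow::Result<StorageManager> {
    let store: DynStore = Arc::new(object_store::local::LocalFileSystem::new_with_prefix(&base)?);
    Ok(StorageManager {
        store,
        backend_kind: StorageKind::Local,
        local_base: Some(base),
    })
}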

View File

@@ -61,36 +61,34 @@ impl TextChunk {
embedding: Vec<f32>,
db: &SurrealDbClient,
) -> Result<(), AppError> {
let emb = TextChunkEmbedding::new(
&chunk.id,
chunk.source_id.clone(),
embedding,
chunk.user_id.clone(),
);
let chunk_id = chunk.id.clone();
let source_id = chunk.source_id.clone();
let user_id = chunk.user_id.clone();
// Create both records in a single query
let query = format!(
"
BEGIN TRANSACTION;
CREATE type::thing('{chunk_table}', $chunk_id) CONTENT $chunk;
CREATE type::thing('{emb_table}', $emb_id) CONTENT $emb;
COMMIT TRANSACTION;
",
chunk_table = Self::table_name(),
emb_table = TextChunkEmbedding::table_name(),
);
let emb = TextChunkEmbedding::new(&chunk_id, source_id.clone(), embedding, user_id.clone());
db.client
.query(query)
.bind(("chunk_id", chunk.id.clone()))
// Create both records in a single transaction so we don't orphan embeddings or chunks
let response = db
.client
.query("BEGIN TRANSACTION;")
.query(format!(
"CREATE type::thing('{chunk_table}', $chunk_id) CONTENT $chunk;",
chunk_table = Self::table_name(),
))
.query(format!(
"CREATE type::thing('{emb_table}', $emb_id) CONTENT $emb;",
emb_table = TextChunkEmbedding::table_name(),
))
.query("COMMIT TRANSACTION;")
.bind(("chunk_id", chunk_id.clone()))
.bind(("chunk", chunk))
.bind(("emb_id", emb.id.clone()))
.bind(("emb", emb))
.await
.map_err(AppError::Database)?
.check()
.map_err(AppError::Database)?;
response.check().map_err(AppError::Database)?;
Ok(())
}
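Chaining .query() calls composes all four statements into one request, and the .bind() parameters are shared across them, which is what lets the BEGIN/COMMIT pair span both CREATEs. A minimal caller sketch, with illustrative values:
// Store a chunk and its embedding atomically; if either CREATE fails,
// the transaction leaves neither row behind.
let chunk = TextChunk::new("src_1".to_string(), "some text".to_string(), "user_1".to_string());
TextChunk::store_with_embedding(chunk, vec![0.1, 0.2, 0.3], &db).await?;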
@@ -330,6 +328,7 @@ impl TextChunk {
#[cfg(test)]
mod tests {
use super::*;
use crate::storage::indexes::{ensure_runtime_indexes, rebuild_indexes};
use crate::storage::types::text_chunk_embedding::TextChunkEmbedding;
use surrealdb::RecordId;
use uuid::Uuid;
@@ -524,6 +523,46 @@ mod tests {
assert_eq!(embedding.source_id, source_id);
}
#[tokio::test]
async fn test_store_with_embedding_with_runtime_indexes() {
let namespace = "test_ns_runtime";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
db.apply_migrations().await.expect("migrations");
// Ensure runtime indexes are built with the expected dimension.
let embedding_dimension = 3usize;
ensure_runtime_indexes(&db, embedding_dimension)
.await
.expect("ensure runtime indexes");
let chunk = TextChunk::new(
"runtime_src".to_string(),
"runtime chunk body".to_string(),
"runtime_user".to_string(),
);
TextChunk::store_with_embedding(chunk.clone(), vec![0.1, 0.2, 0.3], &db)
.await
.expect("store with embedding");
let stored_chunk: Option<TextChunk> = db.get_item(&chunk.id).await.unwrap();
assert!(stored_chunk.is_some(), "chunk should be stored");
let rid = RecordId::from_table_key(TextChunk::table_name(), &chunk.id);
let embedding = TextChunkEmbedding::get_by_chunk_id(&rid, &db)
.await
.expect("get embedding");
assert!(embedding.is_some(), "embedding should exist");
assert_eq!(
embedding.unwrap().embedding.len(),
embedding_dimension,
"embedding dimension should match runtime index"
);
}
#[tokio::test]
async fn test_vector_search_returns_empty_when_no_embeddings() {
let namespace = "test_ns";
@@ -625,7 +664,7 @@ mod tests {
.expect("Failed to start in-memory surrealdb");
db.apply_migrations().await.expect("migrations");
ensure_chunk_fts_index(&db).await;
db.rebuild_indexes().await.expect("rebuild indexes");
rebuild_indexes(&db).await.expect("rebuild indexes");
let results = TextChunk::fts_search(5, "hello", &db, "user")
.await
@@ -651,7 +690,7 @@ mod tests {
user_id.to_string(),
);
db.store_item(chunk.clone()).await.expect("store chunk");
db.rebuild_indexes().await.expect("rebuild indexes");
rebuild_indexes(&db).await.expect("rebuild indexes");
let results = TextChunk::fts_search(3, "rust", &db, user_id)
.await
@@ -698,7 +737,7 @@ mod tests {
db.store_item(other_user_chunk)
.await
.expect("store other user chunk");
db.rebuild_indexes().await.expect("rebuild indexes");
rebuild_indexes(&db).await.expect("rebuild indexes");
let results = TextChunk::fts_search(3, "apple", &db, user_id)
.await

View File

@@ -20,8 +20,8 @@ use crate::{
#[allow(clippy::module_name_repetitions)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum EmbeddingBackend {
OpenAI,
#[default]
OpenAI,
FastEmbed,
Hashed,
}
@@ -276,9 +276,7 @@ fn bucket(token: &str, dimension: usize) -> usize {
let safe_dimension = dimension.max(1);
let mut hasher = DefaultHasher::new();
token.hash(&mut hasher);
usize::try_from(hasher.finish())
.unwrap_or_default()
% safe_dimension
usize::try_from(hasher.finish()).unwrap_or_default() % safe_dimension
}
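A quick property of the hashed bucketing: within a process, the same token always lands in the same slot of [0, dimension). A small sketch:
// Bucketing is deterministic per token and bounded by the dimension.
let dim = 8;
assert_eq!(bucket("hello", dim), bucket("hello", dim));
assert!(bucket("hello", dim) < dim);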
// Backward compatibility function