37 Commits
v0.2.3 ... main

Author SHA1 Message Date
Per Stark
a2c9bb848d release: 0.2.7 2025-12-04 12:25:46 +01:00
Per Stark
04ee225732 design: improved admin page, new structure 2025-11-04 20:42:24 +01:00
Per Stark
13b7ad6f3a fix: added cargo lock to crane build 2025-11-04 12:59:32 +01:00
Per Stark
112a6965a4 Merge branch 'main' into development 2025-11-03 12:48:04 +01:00
Per Stark
911e830be5 Merge branch 'development' of github.com:perstarkse/minne into development 2025-11-03 12:40:36 +01:00
Per Stark
3196e65172 fix: improved storage manager, prep for s3 2025-11-03 12:39:15 +01:00
Per Stark
380c900c86 release: 0.2.6
dist update

fix new workflow

fix

mkdir

moved to dist

fix only dir

dont verify sha files

fix verify ci part

fix

no checking anymore
2025-11-01 21:26:06 +01:00
Per Stark
a99e5ada8b Merge pull request #5 from josephleee/patch-1
Update README.md
2025-10-31 13:40:06 +01:00
Per Stark
b0deabaf3f release: 0.2.6 2025-10-31 13:38:11 +01:00
Joseph
a8f0d9fa88 Update README.md
KaraKeep url is deprecated. link to origin github url
2025-10-30 16:12:39 +09:00
Per Stark
56a1dfddb8 fix: updated docker container for reranking 2025-10-29 12:04:39 +01:00
Per Stark
863b921fb4 fix: updated nix build to work with reranking deps 2025-10-28 22:28:45 +01:00
Per Stark
f13791cfcf fix: better default naming of relationships 2025-10-27 20:46:00 +01:00
Per Stark
75c200b2ba fix: update graph view when changes in knowledge store 2025-10-27 18:22:15 +01:00
Per Stark
1b7c24747a fix: in memory object store handler for testing 2025-10-27 17:03:03 +01:00
Per Stark
241ad9a089 fix: scratchpad tz aware datetime 2025-10-27 14:00:22 +01:00
Per Stark
72578296db feat: reranking with fastembed added 2025-10-27 13:05:10 +01:00
Per Stark
a0e9387c76 docs: updated readme 2025-10-24 23:34:18 +02:00
Per Stark
798b1468b6 release: 0.2.5 2025-10-24 22:13:08 +02:00
Per Stark
3b805778b4 feat: scratchpad
additional improvements

changelog

fix: wording
2025-10-22 22:29:41 +02:00
Per Stark
07b3e1a0e8 refactor: implemented state machine for ingestion pipeline, improved performance
changelog

additional moving around

moved files around a bit
2025-10-20 13:08:00 +02:00
Per Stark
83d39afad4 refactor: implemented state machines for retrieval pipeline, improved tracing 2025-10-19 09:18:17 +02:00
Per Stark
21e4ab1f42 chore: clippy composite retrieval 2025-10-16 20:37:51 +02:00
Per Stark
3c97d8ead5 chore: clippy ingestion-pipeline 2025-10-16 20:36:39 +02:00
Per Stark
ab68bccb80 chore: clippy api-router 2025-10-16 20:33:57 +02:00
Per Stark
99b88c3063 chore: clippy html-router 2025-10-16 20:30:17 +02:00
Per Stark
44e5d8a2fc chore: clippy fixes 2025-10-16 20:29:15 +02:00
Per Stark
7332347f1a feat: quick search knowledge entities 2025-10-16 20:08:01 +02:00
Per Stark
199186e5a3 fix: variable name 2025-10-16 11:24:07 +02:00
Per Stark
64728468cd design: knowledge new entity button to the left 2025-10-16 10:24:33 +02:00
Per Stark
c3a7e8dc59 chore: clippy performance improvements 2025-10-15 22:24:59 +02:00
Per Stark
35ff4e1464 feat: manual entity creation
chore: clippy
2025-10-15 21:50:52 +02:00
Per Stark
2964f1a5a5 release: 0.2.4 2025-10-15 09:09:35 +02:00
Per Stark
cb7f625b81 fix: score normalization for vector search 2025-10-14 21:13:58 +02:00
Per Stark
dc40cf7663 feat: hybrid search 2025-10-14 20:38:43 +02:00
Per Stark
aa0b1462a1 feat: task archive
fix: simplified
2025-10-14 10:38:09 +02:00
Per Stark
41fc7bb99c feat: state machine for tasks, multiple workers 2025-10-12 22:21:20 +02:00
115 changed files with 11334 additions and 2706 deletions

.github/build-setup.yml (new file, +49 lines)

@@ -0,0 +1,49 @@
- name: Prepare lib dir
run: mkdir -p lib
# Linux
- name: Fetch ONNX Runtime (Linux)
if: runner.os == 'Linux'
env:
ORT_VER: 1.22.0
run: |
set -euo pipefail
ARCH="$(uname -m)"
case "$ARCH" in
x86_64) URL="https://github.com/microsoft/onnxruntime/releases/download/v${ORT_VER}/onnxruntime-linux-x64-${ORT_VER}.tgz" ;;
aarch64) URL="https://github.com/microsoft/onnxruntime/releases/download/v${ORT_VER}/onnxruntime-linux-aarch64-${ORT_VER}.tgz" ;;
*) echo "Unsupported arch $ARCH"; exit 1 ;;
esac
curl -fsSL -o ort.tgz "$URL"
tar -xzf ort.tgz
cp -v onnxruntime-*/lib/libonnxruntime.so* lib/
# macOS
- name: Fetch ONNX Runtime (macOS)
if: runner.os == 'macOS'
env:
ORT_VER: 1.22.0
run: |
set -euo pipefail
curl -fsSL -o ort.tgz "https://github.com/microsoft/onnxruntime/releases/download/v${ORT_VER}/onnxruntime-osx-universal2-${ORT_VER}.tgz"
tar -xzf ort.tgz
# copy the main dylib; rename to stable name if needed
cp -v onnxruntime-*/lib/libonnxruntime*.dylib lib/
# optional: ensure a stable name
if [ ! -f lib/libonnxruntime.dylib ]; then
cp -v lib/libonnxruntime*.dylib lib/libonnxruntime.dylib
fi
# Windows
- name: Fetch ONNX Runtime (Windows)
if: runner.os == 'Windows'
shell: pwsh
env:
ORT_VER: 1.22.0
run: |
$url = "https://github.com/microsoft/onnxruntime/releases/download/v$env:ORT_VER/onnxruntime-win-x64-$env:ORT_VER.zip"
Invoke-WebRequest $url -OutFile ort.zip
Expand-Archive ort.zip -DestinationPath ort
$dll = Get-ChildItem -Recurse -Path ort -Filter onnxruntime.dll | Select-Object -First 1
Copy-Item $dll.FullName lib\onnxruntime.dll


@@ -1,44 +1,8 @@
# This file was autogenerated by dist: https://opensource.axo.dev/cargo-dist/
#
# Copyright 2022-2024, axodotdev
# SPDX-License-Identifier: MIT or Apache-2.0
#
# CI that:
#
# * checks for a Git Tag that looks like a release
# * builds artifacts with dist (archives, installers, hashes)
# * uploads those artifacts to temporary workflow zip
# * on success, uploads the artifacts to a GitHub Release
#
# Note that the GitHub Release will be created with a generated
# title/body based on your changelogs.
name: Release
permissions:
"contents": "write"
"packages": "write"
contents: write
packages: write
# This task will run whenever you push a git tag that looks like a version
# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where
# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
#
# If PACKAGE_NAME is specified, then the announcement will be for that
# package (erroring out if it doesn't have the given version or isn't dist-able).
#
# If PACKAGE_NAME isn't specified, then the announcement will be for all
# (dist-able) packages in the workspace with that version (this mode is
# intended for workspaces with only one dist-able package, or with all dist-able
# packages versioned/released in lockstep).
#
# If you push multiple tags at once, separate instances of this workflow will
# spin up, creating an independent announcement for each one. However, GitHub
# will hard limit this to 3 tags per commit, as it will assume more tags is a
# mistake.
#
# If there's a prerelease-style suffix to the version, then the release(s)
# will be marked as a prerelease.
on:
pull_request:
push:
@@ -46,9 +10,8 @@ on:
- '**[0-9]+.[0-9]+.[0-9]+*'
jobs:
# Run 'dist plan' (or host) to determine what tasks we need to do
plan:
runs-on: "ubuntu-22.04"
runs-on: ubuntu-22.04
outputs:
val: ${{ steps.plan.outputs.manifest }}
tag: ${{ !github.event.pull_request && github.ref_name || '' }}
@@ -60,52 +23,36 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install dist
# we specify bash to get pipefail; it guards against the `curl` command
# failing. otherwise `sh` won't catch that `curl` returned non-0
shell: bash
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.28.0/cargo-dist-installer.sh | sh"
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.0/cargo-dist-installer.sh | sh"
- name: Cache dist
uses: actions/upload-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/dist
# sure would be cool if github gave us proper conditionals...
# so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
# functionality based on whether this is a pull_request, and whether it's from a fork.
# (PRs run on the *source* but secrets are usually on the *target* -- that's *good*
# but also really annoying to build CI around when it needs secrets to work right.)
- id: plan
run: |
dist ${{ (!github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name)) || 'plan' }} --output-format=json > plan-dist-manifest.json
echo "dist ran successfully"
cat plan-dist-manifest.json
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
echo "manifest=$(jq -c . plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: Upload dist-manifest.json
uses: actions/upload-artifact@v4
with:
name: artifacts-plan-dist-manifest
path: plan-dist-manifest.json
# Build and packages all the platform-specific things
build-local-artifacts:
name: build-local-artifacts (${{ join(matrix.targets, ', ') }})
# Let the initial task tell us to not run (currently very blunt)
needs:
- plan
needs: [plan]
if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }}
strategy:
fail-fast: false
# Target platforms/runners are computed by dist in create-release.
# Each member of the matrix has the following arguments:
#
# - runner: the github runner
# - dist-args: cli flags to pass to dist
# - install-dist: expression to run to install dist on the runner
#
# Typically there will be:
# - 1 "global" task that builds universal installers
# - N "local" tasks that build each platform's binaries and platform-specific installers
matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }}
runs-on: ${{ matrix.runner }}
container: ${{ matrix.container && matrix.container.image || null }}
@@ -114,11 +61,12 @@ jobs:
BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json
steps:
- name: enable windows longpaths
run: |
git config --global core.longpaths true
run: git config --global core.longpaths true
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install Rust non-interactively if not already installed
if: ${{ matrix.container }}
run: |
@@ -126,37 +74,103 @@ jobs:
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
echo "$HOME/.cargo/bin" >> $GITHUB_PATH
fi
- name: Install dist
run: ${{ matrix.install_dist.run }}
# Get the dist-manifest
- name: Fetch local artifacts
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: target/distrib/
merge-multiple: true
# ===== BEGIN: Injected ORT staging for cargo-dist bundling =====
- run: echo "=== BUILD-SETUP START ==="
# Unix shells
- name: Prepare lib dir (Unix)
if: runner.os != 'Windows'
shell: bash
run: |
mkdir -p lib
rm -f lib/*
# Windows PowerShell
- name: Prepare lib dir (Windows)
if: runner.os == 'Windows'
shell: pwsh
run: |
New-Item -ItemType Directory -Force -Path lib | Out-Null
# remove contents if any
Get-ChildItem -Path lib -Force | Remove-Item -Force -Recurse -ErrorAction SilentlyContinue
- name: Fetch ONNX Runtime (Linux)
if: runner.os == 'Linux'
env:
ORT_VER: 1.22.0
run: |
set -euo pipefail
ARCH="$(uname -m)"
case "$ARCH" in
x86_64) URL="https://github.com/microsoft/onnxruntime/releases/download/v${ORT_VER}/onnxruntime-linux-x64-${ORT_VER}.tgz" ;;
aarch64) URL="https://github.com/microsoft/onnxruntime/releases/download/v${ORT_VER}/onnxruntime-linux-aarch64-${ORT_VER}.tgz" ;;
*) echo "Unsupported arch $ARCH"; exit 1 ;;
esac
curl -fsSL -o ort.tgz "$URL"
tar -xzf ort.tgz
cp -v onnxruntime-*/lib/libonnxruntime.so* lib/
# normalize to stable name if needed
[ -f lib/libonnxruntime.so ] || cp -v lib/libonnxruntime.so.* lib/libonnxruntime.so
- name: Fetch ONNX Runtime (macOS)
if: runner.os == 'macOS'
env:
ORT_VER: 1.22.0
run: |
set -euo pipefail
curl -fsSL -o ort.tgz "https://github.com/microsoft/onnxruntime/releases/download/v${ORT_VER}/onnxruntime-osx-universal2-${ORT_VER}.tgz"
tar -xzf ort.tgz
cp -v onnxruntime-*/lib/libonnxruntime*.dylib lib/
[ -f lib/libonnxruntime.dylib ] || cp -v lib/libonnxruntime*.dylib lib/libonnxruntime.dylib
- name: Fetch ONNX Runtime (Windows)
if: runner.os == 'Windows'
shell: pwsh
env:
ORT_VER: 1.22.0
run: |
$url = "https://github.com/microsoft/onnxruntime/releases/download/v$env:ORT_VER/onnxruntime-win-x64-$env:ORT_VER.zip"
Invoke-WebRequest $url -OutFile ort.zip
Expand-Archive ort.zip -DestinationPath ort
$dll = Get-ChildItem -Recurse -Path ort -Filter onnxruntime.dll | Select-Object -First 1
Copy-Item $dll.FullName lib\onnxruntime.dll
- run: |
echo "=== BUILD-SETUP END ==="
echo "lib/ contents:"
ls -l lib || dir lib
# ===== END: Injected ORT staging =====
- name: Install dependencies
run: |
${{ matrix.packages_install }}
- name: Build artifacts
run: |
# Actually do builds and make zips and whatnot
dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json
echo "dist ran successfully"
- id: cargo-dist
name: Post-build
# We force bash here just because github makes it really hard to get values up
# to "real" actions without writing to env-vars, and writing to env-vars has
# inconsistent syntax between shell and powershell.
shell: bash
run: |
# Parse out what we just built and upload it to scratch storage
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
dist print-upload-files-from-manifest --manifest dist-manifest.json >> "$GITHUB_OUTPUT"
echo "EOF" >> "$GITHUB_OUTPUT"
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
- name: "Upload artifacts"
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: artifacts-build-local-${{ join(matrix.targets, '_') }}
@@ -167,16 +181,16 @@ jobs:
build_and_push_docker_image:
name: Build and Push Docker Image
runs-on: ubuntu-latest
needs: [plan]
if: ${{ needs.plan.outputs.publishing == 'true' }}
needs: [plan]
if: ${{ needs.plan.outputs.publishing == 'true' }}
permissions:
contents: read # Permission to checkout the repository
packages: write # Permission to push Docker image to GHCR
contents: read
packages: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
submodules: recursive # Matches your other checkout steps
submodules: recursive
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
@@ -185,33 +199,28 @@ jobs:
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }} # User triggering the workflow
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract Docker metadata
id: meta
uses: docker/metadata-action@v5
with:
images: ghcr.io/${{ github.repository }}
# This action automatically uses the Git tag as the Docker image tag.
# For example, a Git tag 'v1.2.3' will result in Docker tag 'ghcr.io/owner/repo:v1.2.3'.
images: ghcr.io/${{ github.repository }}
- name: Build and push Docker image
uses: docker/build-push-action@v5
with:
context: .
context: .
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha # Enable Docker layer caching from GitHub Actions cache
cache-to: type=gha,mode=max # Enable Docker layer caching to GitHub Actions cache
cache-from: type=gha
cache-to: type=gha,mode=max
# Build and package all the platform-agnostic(ish) things
build-global-artifacts:
needs:
- plan
- build-local-artifacts
runs-on: "ubuntu-22.04"
needs: [plan, build-local-artifacts]
runs-on: ubuntu-22.04
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
@@ -219,92 +228,90 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install cached dist
uses: actions/download-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
- name: Fetch local artifacts
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: target/distrib/
merge-multiple: true
- id: cargo-dist
shell: bash
run: |
dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
echo "dist ran successfully"
# Parse out what we just built and upload it to scratch storage
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
echo "EOF" >> "$GITHUB_OUTPUT"
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
- name: "Upload artifacts"
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: artifacts-build-global
path: |
${{ steps.cargo-dist.outputs.paths }}
${{ env.BUILD_MANIFEST_NAME }}
# Determines if we should publish/announce
host:
needs:
- plan
- build-local-artifacts
- build-global-artifacts
# Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
needs: [plan, build-local-artifacts, build-global-artifacts]
if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
runs-on: "ubuntu-22.04"
runs-on: ubuntu-22.04
outputs:
val: ${{ steps.host.outputs.manifest }}
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install cached dist
uses: actions/download-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/dist
# Fetch artifacts from scratch-storage
- name: Fetch artifacts
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: target/distrib/
merge-multiple: true
- id: host
shell: bash
run: |
dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
echo "artifacts uploaded and released successfully"
cat dist-manifest.json
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
echo "manifest=$(jq -c . dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: Upload dist-manifest.json
uses: actions/upload-artifact@v4
with:
# Overwrite the previous copy
name: artifacts-dist-manifest
path: dist-manifest.json
# Create a GitHub Release while uploading all files to it
- name: "Download GitHub Artifacts"
- name: Download GitHub Artifacts
uses: actions/download-artifact@v4
with:
pattern: artifacts-*
path: artifacts
merge-multiple: true
- name: Cleanup
run: |
# Remove the granular manifests
rm -f artifacts/*-dist-manifest.json
run: rm -f artifacts/*-dist-manifest.json
- name: Create GitHub Release
env:
PRERELEASE_FLAG: "${{ fromJson(steps.host.outputs.manifest).announcement_is_prerelease && '--prerelease' || '' }}"
@@ -312,20 +319,13 @@ jobs:
ANNOUNCEMENT_BODY: "${{ fromJson(steps.host.outputs.manifest).announcement_github_body }}"
RELEASE_COMMIT: "${{ github.sha }}"
run: |
# Write and read notes from a file to avoid quoting breaking things
echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt
gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/*
announce:
needs:
- plan
- host
# use "always() && ..." to allow us to wait for all publish jobs while
# still allowing individual publish jobs to skip themselves (for prereleases).
# "host" however must run to completion, no skipping allowed!
needs: [plan, host]
if: ${{ always() && needs.host.result == 'success' }}
runs-on: "ubuntu-22.04"
runs-on: ubuntu-22.04
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:


@@ -1,6 +1,24 @@
# Changelog
## Unreleased
## Version 0.2.7 (2025-12-04)
- Improved admin page; models are now loaded only when specifically requested. Lays the groundwork for upcoming configuration features.
- Fix: timezone aware info in scratchpad
## Version 0.2.6 (2025-10-29)
- Added an opt-in FastEmbed-based reranking stage behind `reranking_enabled`. It improves retrieval accuracy by re-scoring hybrid results.
- Fix: default name for relationships harmonized across application
## Version 0.2.5 (2025-10-24)
- Added manual knowledge entity creation flows using a modal, with the option for suggested relationships
- Scratchpad feature, with the ability to convert scratchpads to content.
- Added knowledge entity search results to the global search
- Backend fixes for improved performance during ingestion and retrieval
## Version 0.2.4 (2025-10-15)
- Improved retrieval performance. Ingestion and chat now utilize full-text search, vector comparison, and graph traversal.
- Ingestion task archive
## Version 0.2.3 (2025-10-12)
- Fix changing vector dimensions on a fresh database (#3)

Cargo.lock (generated, 986 lines changed): diff suppressed because it is too large


@@ -55,7 +55,57 @@ tokio-retry = "0.3.0"
base64 = "0.22.1"
object_store = { version = "0.11.2" }
bytes = "1.7.1"
state-machines = "0.2.0"
fastembed = { version = "5.2.0", default-features = false, features = ["hf-hub-native-tls", "ort-load-dynamic"] }
[profile.dist]
inherits = "release"
lto = "thin"
[workspace.lints.clippy]
# Performance-focused lints
perf = { level = "warn", priority = -1 }
vec_init_then_push = "warn"
large_stack_frames = "warn"
redundant_allocation = "warn"
single_char_pattern = "warn"
string_extend_chars = "warn"
format_in_format_args = "warn"
slow_vector_initialization = "warn"
inefficient_to_string = "warn"
implicit_clone = "warn"
redundant_clone = "warn"
# Security-focused lints
integer_arithmetic = "warn"
indexing_slicing = "warn"
unwrap_used = "warn"
expect_used = "warn"
panic = "warn"
unimplemented = "warn"
todo = "warn"
# Async/Network lints
async_yields_async = "warn"
await_holding_invalid_state = "warn"
rc_buffer = "warn"
# Maintainability-focused lints
cargo = { level = "warn", priority = -1 }
pedantic = { level = "warn", priority = -1 }
clone_on_ref_ptr = "warn"
float_cmp = "warn"
manual_string_new = "warn"
uninlined_format_args = "warn"
unused_self = "warn"
must_use_candidate = "allow"
missing_errors_doc = "allow"
missing_panics_doc = "warn"
module_name_repetitions = "warn"
wildcard_dependencies = "warn"
missing_docs_in_private_items = "warn"
# Allow noisy lints that don't add value for this project
manual_must_use = "allow"
needless_raw_string_hashes = "allow"
multiple_bound_locations = "allow"


@@ -1,7 +1,10 @@
# === Builder Stage ===
FROM clux/muslrust:1.86.0-stable as builder
# === Builder ===
FROM rust:1.86-bookworm AS builder
WORKDIR /usr/src/minne
RUN apt-get update && apt-get install -y --no-install-recommends \
pkg-config clang cmake git && rm -rf /var/lib/apt/lists/*
# Cache deps
COPY Cargo.toml Cargo.lock ./
RUN mkdir -p api-router common composite-retrieval html-router ingestion-pipeline json-stream-parser main worker
COPY api-router/Cargo.toml ./api-router/
@@ -11,43 +14,38 @@ COPY html-router/Cargo.toml ./html-router/
COPY ingestion-pipeline/Cargo.toml ./ingestion-pipeline/
COPY json-stream-parser/Cargo.toml ./json-stream-parser/
COPY main/Cargo.toml ./main/
RUN cargo build --release --bin main --features ingestion-pipeline/docker || true
# Build with the MUSL target
RUN cargo build --release --target x86_64-unknown-linux-musl --bin main --features ingestion-pipeline/docker || true
# Copy the rest of the source code
# Build
COPY . .
RUN cargo build --release --bin main --features ingestion-pipeline/docker
# Build the final application binary with the MUSL target
RUN cargo build --release --target x86_64-unknown-linux-musl --bin main --features ingestion-pipeline/docker
# === Runtime ===
FROM debian:bookworm-slim
# === Runtime Stage ===
FROM alpine:latest
# Chromium + runtime deps + OpenMP for ORT
RUN apt-get update && apt-get install -y --no-install-recommends \
chromium libnss3 libasound2 libgbm1 libxshmfence1 \
ca-certificates fonts-dejavu fonts-noto-color-emoji \
libgomp1 libstdc++6 curl \
&& rm -rf /var/lib/apt/lists/*
RUN apk update && apk add --no-cache \
chromium \
nss \
freetype \
harfbuzz \
ca-certificates \
ttf-freefont \
font-noto-emoji \
&& \
rm -rf /var/cache/apk/*
# ONNX Runtime (CPU). Change if you bump ort.
ARG ORT_VERSION=1.22.0
RUN mkdir -p /opt/onnxruntime && \
curl -fsSL -o /tmp/ort.tgz \
"https://github.com/microsoft/onnxruntime/releases/download/v${ORT_VERSION}/onnxruntime-linux-x64-${ORT_VERSION}.tgz" && \
tar -xzf /tmp/ort.tgz -C /opt/onnxruntime --strip-components=1 && rm /tmp/ort.tgz
ENV CHROME_BIN=/usr/bin/chromium-browser \
CHROME_PATH=/usr/lib/chromium/ \
SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
ENV CHROME_BIN=/usr/bin/chromium \
SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt \
ORT_DYLIB_PATH=/opt/onnxruntime/lib/libonnxruntime.so
# Create a non-root user to run the application
RUN adduser -D -h /home/appuser appuser
WORKDIR /home/appuser
# Non-root
RUN useradd -m appuser
USER appuser
WORKDIR /home/appuser
# Copy the compiled binary from the builder stage (note the target path)
COPY --from=builder /usr/src/minne/target/x86_64-unknown-linux-musl/release/main /usr/local/bin/main
COPY --from=builder /usr/src/minne/target/release/main /usr/local/bin/main
EXPOSE 3000
# EXPOSE 8000-9000
CMD ["main"]

README.md (395 lines changed)

@@ -12,194 +12,142 @@
To test _Minne_ out, enter [this](https://minne-demo.stark.pub) read-only demo deployment to view and test functionality out.
## Noteworthy Features
- **Search & Chat Interface** - Find content or knowledge instantly with full-text search, or use the chat mode and conversational AI to find and reason about content
- **Manual and AI-assisted connections** - Build entities and relationships manually with full control, let AI create entities and relationships automatically, or blend both approaches with AI suggestions for manual approval
- **Hybrid Retrieval System** - Search combining vector similarity, full-text search, and graph traversal for highly relevant results
- **Scratchpad Feature** - Quickly capture thoughts and convert them to permanent content when ready
- **Visual Graph Explorer** - Interactive D3-based navigation of your knowledge entities and connections
- **Multi-Format Support** - Ingest text, URLs, PDFs, audio files, and images into your knowledge base
- **Performance Focus** - Built with Rust and server-side rendering for speed and efficiency
- **Self-Hosted & Privacy-Focused** - Full control over your data, and compatible with any OpenAI-compatible API that supports structured outputs
## The "Why" Behind Minne
For a while I've been fascinated by Zettelkasten-style PKM systems. While tools like Logseq and Obsidian are excellent, I found the manual linking process to be a hindrance for me. I also wanted a centralized storage and easy access across devices.
For a while I've been fascinated by personal knowledge management systems. I wanted something that made it incredibly easy to capture content - snippets of text, URLs, and other media - while automatically discovering connections between ideas. But I also wanted to maintain control over my knowledge structure.
While developing Minne, I discovered [KaraKeep](https://karakeep.com/) (formerly Hoarder), which is an excellent application in a similar space that you probably want to check out! However, if you're interested in a PKM that builds an automatic network between related concepts using AI, offers search and the **possibility to chat with your knowledge resource**, and provides a blend of manual and AI-driven organization, then Minne might be worth testing.
Traditional tools like Logseq and Obsidian are excellent, but the manual linking process often became a hindrance. Meanwhile, fully automated systems sometimes miss important context or create relationships I wouldn't have chosen myself.
## Core Philosophy & Features
So I built Minne to offer the best of both worlds: effortless content capture with AI-assisted relationship discovery, but with the flexibility to manually curate, edit, or override any connections. You can let AI handle the heavy lifting of extracting entities and finding relationships, take full control yourself, or use a hybrid approach where AI suggests connections that you can approve or modify.
Minne is designed to make it incredibly easy to save snippets of text, URLs, and other content (limited, pending demand). Simply send content along with a category tag. Minne then ingests this, leveraging AI to create relevant nodes and relationships within its graph database, alongside your manual categorization. This graph backend allows for discoverable connections between your pieces of knowledge.
While developing Minne, I discovered [KaraKeep](https://github.com/karakeep-app/karakeep) (formerly Hoarder), which is an excellent application in a similar space that you probably want to check out! However, if you're interested in a PKM that offers both intelligent automation and manual curation, with the ability to chat with your knowledge base, then Minne might be worth testing.
You can converse with your knowledge base through an LLM-powered chat interface (via an OpenAI-compatible API, such as Ollama or others). For those who like to see the bigger picture, Minne also includes a feature to visually explore your knowledge graph.
## Table of Contents
You may switch and choose between the models used, and have the possibility to change the prompts to your liking. There is the option to change the embedding length, making it easy to test another embedding model.
- [Quick Start](#quick-start)
- [Features in Detail](#features-in-detail)
- [Configuration](#configuration)
- [Tech Stack](#tech-stack)
- [Application Architecture](#application-architecture)
- [AI Configuration](#ai-configuration--model-selection)
- [Roadmap](#roadmap)
- [Development](#development)
- [Contributing](#contributing)
- [License](#license)
The application is built for speed and efficiency using Rust with a Server-Side Rendered (SSR) frontend (HTMX and minimal JavaScript). It's fully responsive, offering a complete mobile interface for reading, editing, and managing your content, including the graph database itself. **PWA (Progressive Web App) support** means you can "install" Minne to your device for a native-like experience. For quick capture on the go on iOS, a [**Shortcut**](https://www.icloud.com/shortcuts/e433fbd7602f4e2eaa70dca162323477) makes sending content to your Minne instance a breeze.
## Quick Start
Minne is open source (AGPL), self-hostable, and can be deployed flexibly: via Nix, Docker Compose, pre-built binaries, or by building from source. It can run as a single `main` binary or as separate `server` and `worker` processes for optimized resource allocation.
The fastest way to get Minne running is with Docker Compose:
```bash
# Clone the repository
git clone https://github.com/perstarkse/minne.git
cd minne
# Start Minne and its database
docker compose up -d
# Access at http://localhost:3000
```
**Required Setup:**
- Replace `your_openai_api_key_here` in `docker-compose.yml` with your actual API key
- Configure `OPENAI_BASE_URL` if using a custom AI provider (like Ollama)
For detailed installation options, see [Configuration](#configuration).
## Features in Detail
### Search vs. Chat mode
**Search** - Use when you know roughly what you're looking for. Full-text search finds items quickly by matching your query terms.
**Chat Mode** - Use when you want to explore concepts, find connections, or reason about your knowledge. The AI analyzes your query and finds relevant context across your entire knowledge base.
### Content Processing
Minne automatically processes content you save:
1. **Web scraping** extracts readable text from URLs
2. **Text analysis** identifies key concepts and relationships
3. **Graph creation** builds connections between related content
4. **Embedding generation** enables semantic search capabilities
### Visual Knowledge Graph
Explore your knowledge as an interactive network with flexible curation options:
**Manual Curation** - Create knowledge entities and relationships yourself with full control over your graph structure
**AI Automation** - Let AI automatically extract entities and discover relationships from your content
**Hybrid Approach** - Get AI-suggested relationships and entities that you can manually review, edit, or approve
The graph visualization shows:
- Knowledge entities as nodes (manually created or AI-extracted)
- Relationships as connections (manually defined, AI-discovered, or suggested)
- Interactive navigation for discovery and editing
### Optional FastEmbed Reranking
Minne ships with an opt-in reranking stage powered by [fastembed-rs](https://github.com/Anush008/fastembed-rs). When enabled, the hybrid retrieval results are rescored with a lightweight cross-encoder before being returned to chat or ingestion flows. In practice this often means more relevant results, boosting answer quality and downstream enrichment.
⚠️ **Resource notes**
- Enabling reranking downloads and caches ~1.1GB of model data on first startup (cached under `<data_dir>/fastembed/reranker` by default).
- Initialization takes longer while warming the cache, and each query consumes extra CPU. The default pool size (2) is tuned for a single-user setup, but a pool size of 1 can work as well.
- The feature is disabled by default. Set `reranking_enabled: true` (or `RERANKING_ENABLED=true`) if you're comfortable with the additional footprint.
Example configuration:
```yaml
reranking_enabled: true
reranking_pool_size: 2
fastembed_cache_dir: "/var/lib/minne/fastembed" # optional override, defaults to .fastembed_cache
```
## Tech Stack
- **Backend:** Rust. Server-Side Rendering (SSR). Axum. Minijinja for templating.
- **Frontend:** HTML. HTMX and plain JavaScript for interactivity.
- **Database:** SurrealDB
- **AI Integration:** OpenAI API compatible endpoint (for chat and content processing), with support for structured outputs.
- **Web Content Processing:** Relies on a Chromium instance for robust webpage fetching/rendering.
## Prerequisites
- **For Docker/Nix:** Docker or Nix installed. These methods handle SurrealDB and Chromium dependencies.
- **For Binaries/Source:**
- A running SurrealDB instance.
- Chromium (or a compatible Chrome browser) installed and accessible in your `PATH`.
- Git (if cloning and building from source).
- Rust toolchain (if building from source).
## Getting Started
You have several options to get Minne up and running:
### 1. Nix (Recommended for ease of dependency management)
If you have Nix installed, you can run Minne directly:
```bash
nix run 'github:perstarkse/minne#main'
```
This command will fetch Minne and its dependencies (including Chromium) and run the `main` (combined server/worker) application.
### 2. Docker Compose (Recommended for containerized environments)
This is a great way to manage Minne and its SurrealDB dependency together.
1. Clone the repository (or just save the `docker-compose.yml` below).
1. Create a `docker-compose.yml` file:
```yaml
version: "3.8"
services:
minne:
image: ghcr.io/perstarkse/minne:latest # Pulls the latest pre-built image
# Or, to build from local source:
# build: .
container_name: minne_app
ports:
- "3000:3000" # Exposes Minne on port 3000
environment:
# These are examples, ensure they match your SurrealDB setup below
# and your actual OpenAI key.
SURREALDB_ADDRESS: "ws://surrealdb:8000"
SURREALDB_USERNAME: "root_user" # Default from SurrealDB service below
SURREALDB_PASSWORD: "root_password" # Default from SurrealDB service below
SURREALDB_DATABASE: "minne_db"
SURREALDB_NAMESPACE: "minne_ns"
OPENAI_API_KEY: "your_openai_api_key_here" # IMPORTANT: Replace with your actual key
#OPENAI_BASE_URL: "your_ollama_address" # Uncomment this and change it to override the default openai base url
HTTP_PORT: 3000
DATA_DIR: "/data" # Data directory inside the container
RUST_LOG: "minne=info,tower_http=info" # Example logging level
volumes:
- ./minne_data:/data # Persists Minne's data (e.g., scraped content) on the host
depends_on:
- surrealdb
networks:
- minne-net
# Waits for SurrealDB to be ready before starting Minne
command: >
sh -c "
echo 'Waiting for SurrealDB to start...' &&
# Adjust sleep time if SurrealDB takes longer to initialize in your environment
until nc -z surrealdb 8000; do echo 'Waiting for SurrealDB...'; sleep 2; done &&
echo 'SurrealDB is up, starting Minne application...' &&
/usr/local/bin/main
"
# For separate server/worker:
# command: /usr/local/bin/server # or /usr/local/bin/worker
surrealdb:
image: surrealdb/surrealdb:latest
container_name: minne_surrealdb
ports:
# Exposes SurrealDB on port 8000 (primarily for direct access/debugging if needed,
# not strictly required for Minne if only accessed internally by the minne service)
- "127.0.0.1:8000:8000" # Bind to localhost only for SurrealDB by default
volumes:
# Persists SurrealDB data on the host in a 'surreal_database' folder
- ./surreal_database:/database
command: >
start
--log info # Consider 'debug' for troubleshooting
--user root_user
--pass root_password
file:/database/minne_v1.db # Using file-based storage for simplicity
networks:
- minne-net
volumes:
minne_data: {} # Defines a named volume for Minne data (can be managed by Docker)
surreal_database: {} # Defines a named volume for SurrealDB data
networks:
minne-net:
driver: bridge
```
1. Run:
```bash
docker compose up -d
```
Minne will be accessible at `http://localhost:3000`.
### 3. Pre-built Binaries (GitHub Releases)
Binaries for Windows, macOS, and Linux (combined `main` version) are available on the [GitHub Releases page](https://github.com/perstarkse/minne/releases/latest).
1. Download the appropriate binary for your system.
1. **You will need to provide and run SurrealDB and have Chromium installed and accessible in your PATH separately.**
1. Set the required [Configuration](#configuration) environment variables or use a `config.yaml`.
1. Run the executable.
### 4. Build from Source
1. Clone the repository:
```bash
git clone https://github.com/perstarkse/minne.git
cd minne
```
1. **You will need to provide and run SurrealDB and have Chromium installed and accessible in your PATH separately.**
1. Set the required [Configuration](#configuration) environment variables or use a `config.yaml`.
1. Build and run:
- For the combined `main` binary:
```bash
cargo run --release --bin main
```
- For the `server` binary:
```bash
cargo run --release --bin server
```
- For the `worker` binary (if you want to run it separately):
```bash
cargo run --release --bin worker
```
The compiled binaries will be in `target/release/`.
- **Backend:** Rust with Axum framework and Server-Side Rendering (SSR)
- **Frontend:** HTML with HTMX and minimal JavaScript for interactivity
- **Database:** SurrealDB (graph, document, and vector search)
- **AI Integration:** OpenAI-compatible API with structured outputs
- **Web Processing:** Headless Chrome for robust webpage content extraction
## Configuration
Minne can be configured using environment variables or a `config.yaml` file placed in the working directory where you run the application. Environment variables take precedence over `config.yaml`.
Minne can be configured using environment variables or a `config.yaml` file. Environment variables take precedence over `config.yaml`.
**Required Configuration:**
### Required Configuration
- `SURREALDB_ADDRESS`: WebSocket address of your SurrealDB instance (e.g., `ws://127.0.0.1:8000` or `ws://surrealdb:8000` for Docker).
- `SURREALDB_USERNAME`: Username for SurrealDB (e.g., `root_user`).
- `SURREALDB_PASSWORD`: Password for SurrealDB (e.g., `root_password`).
- `SURREALDB_DATABASE`: Database name in SurrealDB (e.g., `minne_db`).
- `SURREALDB_NAMESPACE`: Namespace in SurrealDB (e.g., `minne_ns`).
- `OPENAI_API_KEY`: Your API key for OpenAI compatible endpoint (e.g., `sk-YourActualOpenAIKeyGoesHere`).
- `HTTP_PORT`: Port for the Minne server to listen on (Default: `3000`).
- `SURREALDB_ADDRESS`: WebSocket address of your SurrealDB instance (e.g., `ws://127.0.0.1:8000`)
- `SURREALDB_USERNAME`: Username for SurrealDB (e.g., `root_user`)
- `SURREALDB_PASSWORD`: Password for SurrealDB (e.g., `root_password`)
- `SURREALDB_DATABASE`: Database name in SurrealDB (e.g., `minne_db`)
- `SURREALDB_NAMESPACE`: Namespace in SurrealDB (e.g., `minne_ns`)
- `OPENAI_API_KEY`: Your API key for OpenAI compatible endpoint
- `HTTP_PORT`: Port for the Minne server (Default: `3000`)
**Optional Configuration:**
### Optional Configuration
- `RUST_LOG`: Controls logging level (e.g., `minne=info,tower_http=debug`).
- `DATA_DIR`: Directory to store local data like fetched webpage content (e.g., `./data`).
- `OPENAI_BASE_URL`: Base URL to an OpenAI API provider, such as Ollama.
- `RUST_LOG`: Controls logging level (e.g., `minne=info,tower_http=debug`)
- `DATA_DIR`: Directory to store local data (e.g., `./data`)
- `OPENAI_BASE_URL`: Base URL for custom AI providers (like Ollama)
- `RERANKING_ENABLED` / `reranking_enabled`: Set to `true` to enable the FastEmbed reranking stage (default `false`)
- `RERANKING_POOL_SIZE` / `reranking_pool_size`: Maximum concurrent reranker workers (defaults to `2`)
- `FASTEMBED_CACHE_DIR` / `fastembed_cache_dir`: Directory for cached FastEmbed models (defaults to `<data_dir>/fastembed/reranker`)
- `FASTEMBED_SHOW_DOWNLOAD_PROGRESS` / `fastembed_show_download_progress`: Show model download progress when warming the cache (default `true`)
**Example `config.yaml`:**
### Example config.yaml
```yaml
surrealdb_address: "ws://127.0.0.1:8000"
@@ -213,66 +161,105 @@ http_port: 3000
# rust_log: "info"
```
## Application Architecture (Binaries)
## Installation Options
Minne offers flexibility in deployment:
### 1. Docker Compose (Recommended)
- **`main`**: A combined binary running both server (API, web UI) and worker (background tasks) in one process. Ideal for simpler setups.
- **`server`**: Runs only the server component.
- **`worker`**: Runs only the worker component, suitable for deployment on a machine with more resources for intensive tasks.
```bash
# Clone and run
git clone https://github.com/perstarkse/minne.git
cd minne
docker compose up -d
```
This modularity allows scaling and resource optimization. The `main` binary or the Docker Compose setup (using `main`) is sufficient for most users.
The included `docker-compose.yml` handles SurrealDB and Chromium dependencies automatically.
### 2. Nix
```bash
nix run 'github:perstarkse/minne#main'
```
This fetches Minne and all dependencies, including Chromium.
### 3. Pre-built Binaries
Download binaries for Windows, macOS, and Linux from the [GitHub Releases](https://github.com/perstarkse/minne/releases/latest).
**Requirements:** You'll need to provide SurrealDB and Chromium separately.
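As a hedged sketch of running a downloaded release binary: the archive layout and binary name are assumptions, but the release workflow in this changeset stages `libonnxruntime` into a `lib/` directory for bundling, and the `ort-load-dynamic` setup reads the `ORT_DYLIB_PATH` environment variable (the Docker image sets the same variable), so pointing it at the bundled library should let the optional reranking stage load.

```bash
# Sketch: assumes the extracted release archive contains ./main and ./lib/libonnxruntime.so
export SURREALDB_ADDRESS="ws://127.0.0.1:8000"
export SURREALDB_USERNAME="root_user"
export SURREALDB_PASSWORD="root_password"
export SURREALDB_DATABASE="minne_db"
export SURREALDB_NAMESPACE="minne_ns"
export OPENAI_API_KEY="sk-YourActualOpenAIKeyGoesHere"
# Only needed when reranking is enabled; adjust to wherever the archive was extracted.
export RERANKING_ENABLED="true"
export ORT_DYLIB_PATH="$PWD/lib/libonnxruntime.so"
./main
```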
### 4. Build from Source
```bash
git clone https://github.com/perstarkse/minne.git
cd minne
cargo run --release --bin main
```
**Requirements:** SurrealDB and Chromium must be installed and accessible in your PATH.
## Application Architecture
Minne offers flexible deployment options:
- **`main`**: Combined server and worker in one process (recommended for most users)
- **`server`**: Web interface and API only
- **`worker`**: Background processing only (for resource optimization)
## Usage
Once Minne is running:
Once Minne is running at `http://localhost:3000`:
1. Access the web interface at `http://localhost:3000` (or your configured port).
1. On iOS, consider setting up the [Minne iOS Shortcut](https://www.icloud.com/shortcuts/9aa960600ec14329837ba4169f57a166) for effortless content sending. **Add the shortcut, replace the [insert_url] and the [insert_api_key] snippets**.
1. Add notes, URLs, **audio files**, and explore your growing knowledge graph.
1. Engage with the chat interface to query your saved content.
1. Try the experimental visual graph explorer to see connections.
1. **Web Interface**: Full-featured experience for desktop and mobile
2. **iOS Shortcut**: Use the [Minne iOS Shortcut](https://www.icloud.com/shortcuts/e433fbd7602f4e2eaa70dca162323477) for quick content capture
3. **Content Types**: Save notes, URLs, audio files, and more
4. **Knowledge Graph**: Explore automatic connections between your content
5. **Chat Interface**: Query your knowledge base conversationally
## AI Configuration & Model Selection
Minne relies on an OpenAI-compatible API for processing content, generating graph relationships, and powering the chat feature.
### Setting Up AI Providers
**Environment Variables / `config.yaml` keys:**
Minne uses OpenAI-compatible APIs. Configure via environment variables or `config.yaml`:
- `OPENAI_API_KEY` (required): Your API key for the chosen AI provider.
- `OPENAI_BASE_URL` (optional): Use this to override the default OpenAI API URL (`https://api.openai.com/v1`). This is essential for using local models via services like Ollama, or other API providers.
- **Example for Ollama:** `http://<your-ollama-ip>:11434/v1`
- `OPENAI_API_KEY` (required): Your API key
- `OPENAI_BASE_URL` (optional): Custom provider URL (e.g., Ollama: `http://localhost:11434/v1`)
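As a sketch, a `config.yaml` pointed at a local Ollama instance could look like the following; the lowercase key names are an assumption that mirrors the environment variables, in line with the `surrealdb_*` keys in the example config shown earlier. Model choice itself still happens on the `/admin` page.

```yaml
# Hypothetical provider settings for a local Ollama endpoint.
openai_api_key: "ollama"                      # required by Minne; typically ignored by Ollama itself
openai_base_url: "http://localhost:11434/v1"  # Ollama's OpenAI-compatible endpoint
```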
### Changing Models
### Model Selection
Once you have configured the `OPENAI_BASE_URL` to point to your desired provider, you can select the specific models Minne should use.
1. Navigate to the `/admin` page in your Minne instance.
1. The page will list the models available from your configured endpoint. You can select different models for processing content and for chat.
1. **Important:** For content processing, Minne relies on structured outputs (function calling). The model and provider you select for this task **must** support this feature.
1. **Embedding Dimensions:** If you change the embedding model, you **must** update the "Embedding Dimensions" setting in the admin panel to match the output dimensions of your new model (e.g., `text-embedding-3-small` uses 1536, `nomic-embed-text` uses 768). Mismatched dimensions will cause errors. Some newer models accept a dimension argument; for those, setting the dimensions to any supported value should work.
1. Access the `/admin` page in your Minne instance
2. Select models for content processing and chat from your configured provider
3. **Content Processing Requirements**: The model must support structured outputs
4. **Embedding Dimensions**: Update this setting when changing embedding models (e.g., 1536 for `text-embedding-3-small`, 768 for `nomic-embed-text`)
## Roadmap
I've developed Minne primarily for my own use, but having been in the self-hosted space for a long time and benefited from the efforts of others, I thought I'd share it with the community. Feature requests are welcome.
The roadmap as of now is:
Current development focus:
~~- Handle uploaded images wisely.~~
~~- An updated explorer of the graph database.~~
- A TUI frontend which opens your system default editor for improved writing and document management.
- TUI frontend with system editor integration
- Enhanced reranking for improved retrieval recall
- Additional content type support
## Contributing
Contributions are welcome! Whether it's bug reports, feature suggestions, documentation improvements, or code contributions, please feel free to open an issue or submit a pull request.
Feature requests and contributions are welcome!
## Development
Run tests with
```rust
```bash
# Run tests
cargo test
# Development build
cargo build
# Comprehensive linting
cargo clippy --workspace --all-targets --all-features
```
There is currently a variety of unit tests for commonly used functions. Additional tests, especially integration tests would be very welcome.
The codebase includes extensive unit tests. Integration tests and additional contributions are welcome.
## Contributing
I've developed Minne primarily for my own use, but having been in the self-hosted space for a long time and benefited from the efforts of others, I thought I'd share it with the community.
## License
Minne is licensed under the **GNU Affero General Public License v3.0 (AGPL-3.0)**. See the [LICENSE](LICENSE) file for details. This means if you run a modified version of Minne as a network service, you must also offer the source code of that modified version to its users.
Minne is licensed under the **GNU Affero General Public License v3.0 (AGPL-3.0)**. See the [LICENSE](LICENSE) file for details.


@@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2021"
license = "AGPL-3.0-or-later"
[lints]
workspace = true
[dependencies]
tokio = { workspace = true }
serde = { workspace = true }


@@ -1,15 +1,22 @@
use std::sync::Arc;
use common::{storage::db::SurrealDbClient, utils::config::AppConfig};
use common::{
storage::{db::SurrealDbClient, store::StorageManager},
utils::config::AppConfig,
};
#[derive(Clone)]
pub struct ApiState {
pub db: Arc<SurrealDbClient>,
pub config: AppConfig,
pub storage: StorageManager,
}
impl ApiState {
pub async fn new(config: &AppConfig) -> Result<Self, Box<dyn std::error::Error>> {
pub async fn new(
config: &AppConfig,
storage: StorageManager,
) -> Result<Self, Box<dyn std::error::Error>> {
let surreal_db_client = Arc::new(
SurrealDbClient::new(
&config.surrealdb_address,
@@ -23,9 +30,10 @@ impl ApiState {
surreal_db_client.apply_migrations().await?;
let app_state = ApiState {
let app_state = Self {
db: surreal_db_client.clone(),
config: config.clone(),
storage,
};
Ok(app_state)


@@ -27,40 +27,40 @@ impl From<AppError> for ApiError {
match err {
AppError::Database(_) | AppError::OpenAI(_) => {
tracing::error!("Internal error: {:?}", err);
ApiError::InternalError("Internal server error".to_string())
Self::InternalError("Internal server error".to_string())
}
AppError::NotFound(msg) => ApiError::NotFound(msg),
AppError::Validation(msg) => ApiError::ValidationError(msg),
AppError::Auth(msg) => ApiError::Unauthorized(msg),
_ => ApiError::InternalError("Internal server error".to_string()),
AppError::NotFound(msg) => Self::NotFound(msg),
AppError::Validation(msg) => Self::ValidationError(msg),
AppError::Auth(msg) => Self::Unauthorized(msg),
_ => Self::InternalError("Internal server error".to_string()),
}
}
}
impl IntoResponse for ApiError {
fn into_response(self) -> Response {
let (status, error_response) = match self {
ApiError::InternalError(message) => (
Self::InternalError(message) => (
StatusCode::INTERNAL_SERVER_ERROR,
ErrorResponse {
error: message,
status: "error".to_string(),
},
),
ApiError::ValidationError(message) => (
Self::ValidationError(message) => (
StatusCode::BAD_REQUEST,
ErrorResponse {
error: message,
status: "error".to_string(),
},
),
ApiError::NotFound(message) => (
Self::NotFound(message) => (
StatusCode::NOT_FOUND,
ErrorResponse {
error: message,
status: "error".to_string(),
},
),
ApiError::Unauthorized(message) => (
Self::Unauthorized(message) => (
StatusCode::UNAUTHORIZED,
ErrorResponse {
error: message,


@@ -13,14 +13,12 @@ pub async fn api_auth(
mut request: Request,
next: Next,
) -> Result<Response, ApiError> {
let api_key = extract_api_key(&request).ok_or(ApiError::Unauthorized(
"You have to be authenticated".to_string(),
))?;
let api_key = extract_api_key(&request)
.ok_or_else(|| ApiError::Unauthorized("You have to be authenticated".to_string()))?;
let user = User::find_by_api_key(&api_key, &state.db).await?;
let user = user.ok_or(ApiError::Unauthorized(
"You have to be authenticated".to_string(),
))?;
let user =
user.ok_or_else(|| ApiError::Unauthorized("You have to be authenticated".to_string()))?;
request.extensions_mut().insert(user);
@@ -37,7 +35,7 @@ fn extract_api_key(request: &Request) -> Option<String> {
.headers()
.get("Authorization")
.and_then(|v| v.to_str().ok())
.and_then(|auth| auth.strip_prefix("Bearer ").map(|s| s.trim()))
.and_then(|auth| auth.strip_prefix("Bearer ").map(str::trim))
})
.map(String::from)
}


@@ -32,7 +32,8 @@ pub async fn ingest_data(
info!("Received input: {:?}", input);
let file_infos = try_join_all(input.files.into_iter().map(|file| {
FileInfo::new(file, &state.db, &user.id, &state.config).map_err(AppError::from)
FileInfo::new_with_storage(file, &state.db, &user.id, &state.storage)
.map_err(AppError::from)
}))
.await?;
@@ -46,9 +47,7 @@ pub async fn ingest_data(
let futures: Vec<_> = payloads
.into_iter()
.map(|object| {
IngestionTask::create_and_add_to_db(object.clone(), user.id.clone(), &state.db)
})
.map(|object| IngestionTask::create_and_add_to_db(object, user.id.clone(), &state.db))
.collect();
try_join_all(futures).await?;


@@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2021"
license = "AGPL-3.0-or-later"
[lints]
workspace = true
[dependencies]
# Workspace dependencies
tokio = { workspace = true }
@@ -41,6 +44,7 @@ surrealdb-migrations = { workspace = true }
tokio-retry = { workspace = true }
object_store = { workspace = true }
bytes = { workspace = true }
state-machines = { workspace = true }
[features]


@@ -0,0 +1,17 @@
-- Add FTS indexes for searching name and description on entities
DEFINE ANALYZER IF NOT EXISTS app_en_fts_analyzer
TOKENIZERS class
FILTERS lowercase, ascii, snowball(english);
DEFINE INDEX IF NOT EXISTS knowledge_entity_fts_name_idx ON TABLE knowledge_entity
FIELDS name
SEARCH ANALYZER app_en_fts_analyzer BM25;
DEFINE INDEX IF NOT EXISTS knowledge_entity_fts_description_idx ON TABLE knowledge_entity
FIELDS description
SEARCH ANALYZER app_en_fts_analyzer BM25;
DEFINE INDEX IF NOT EXISTS text_chunk_fts_chunk_idx ON TABLE text_chunk
FIELDS chunk
SEARCH ANALYZER app_en_fts_analyzer BM25;
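As an illustration (not part of the migration), a BM25 query against these indexes uses SurrealDB's match operator and `search::score`; the search term here is just a placeholder.

```sql
-- Example full-text query using the index defined above; @1@ binds match reference 1.
SELECT name, description, search::score(1) AS score
FROM knowledge_entity
WHERE name @1@ 'rust async runtime'
ORDER BY score DESC
LIMIT 10;
```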


@@ -0,0 +1,173 @@
-- State machine migration for ingestion_task records
DEFINE FIELD IF NOT EXISTS state ON TABLE ingestion_task TYPE option<string>;
DEFINE FIELD IF NOT EXISTS attempts ON TABLE ingestion_task TYPE option<number>;
DEFINE FIELD IF NOT EXISTS max_attempts ON TABLE ingestion_task TYPE option<number>;
DEFINE FIELD IF NOT EXISTS scheduled_at ON TABLE ingestion_task TYPE option<datetime>;
DEFINE FIELD IF NOT EXISTS locked_at ON TABLE ingestion_task TYPE option<datetime>;
DEFINE FIELD IF NOT EXISTS lease_duration_secs ON TABLE ingestion_task TYPE option<number>;
DEFINE FIELD IF NOT EXISTS worker_id ON TABLE ingestion_task TYPE option<string>;
DEFINE FIELD IF NOT EXISTS error_code ON TABLE ingestion_task TYPE option<string>;
DEFINE FIELD IF NOT EXISTS error_message ON TABLE ingestion_task TYPE option<string>;
DEFINE FIELD IF NOT EXISTS last_error_at ON TABLE ingestion_task TYPE option<datetime>;
DEFINE FIELD IF NOT EXISTS priority ON TABLE ingestion_task TYPE option<number>;
REMOVE FIELD status ON TABLE ingestion_task;
DEFINE FIELD status ON TABLE ingestion_task TYPE option<object>;
DEFINE INDEX IF NOT EXISTS idx_ingestion_task_state_sched ON TABLE ingestion_task FIELDS state, scheduled_at;
LET $needs_migration = (SELECT count() AS count FROM type::table('ingestion_task') WHERE state = NONE)[0].count;
IF $needs_migration > 0 THEN {
-- Created -> Pending
UPDATE type::table('ingestion_task')
SET
state = "Pending",
attempts = 0,
max_attempts = 3,
scheduled_at = IF created_at != NONE THEN created_at ELSE time::now() END,
locked_at = NONE,
lease_duration_secs = 300,
worker_id = NONE,
error_code = NONE,
error_message = NONE,
last_error_at = NONE,
priority = 0
WHERE state = NONE
AND status != NONE
AND status.name = "Created";
-- InProgress -> Processing
UPDATE type::table('ingestion_task')
SET
state = "Processing",
attempts = IF status.attempts != NONE THEN status.attempts ELSE 1 END,
max_attempts = 3,
scheduled_at = IF status.last_attempt != NONE THEN status.last_attempt ELSE time::now() END,
locked_at = IF status.last_attempt != NONE THEN status.last_attempt ELSE time::now() END,
lease_duration_secs = 300,
worker_id = NONE,
error_code = NONE,
error_message = NONE,
last_error_at = NONE,
priority = 0
WHERE state = NONE
AND status != NONE
AND status.name = "InProgress";
-- Completed -> Succeeded
UPDATE type::table('ingestion_task')
SET
state = "Succeeded",
attempts = 1,
max_attempts = 3,
scheduled_at = IF updated_at != NONE THEN updated_at ELSE time::now() END,
locked_at = NONE,
lease_duration_secs = 300,
worker_id = NONE,
error_code = NONE,
error_message = NONE,
last_error_at = NONE,
priority = 0
WHERE state = NONE
AND status != NONE
AND status.name = "Completed";
-- Error -> DeadLetter (terminal failure)
UPDATE type::table('ingestion_task')
SET
state = "DeadLetter",
attempts = 3,
max_attempts = 3,
scheduled_at = IF updated_at != NONE THEN updated_at ELSE time::now() END,
locked_at = NONE,
lease_duration_secs = 300,
worker_id = NONE,
error_code = NONE,
error_message = status.message,
last_error_at = IF updated_at != NONE THEN updated_at ELSE time::now() END,
priority = 0
WHERE state = NONE
AND status != NONE
AND status.name = "Error";
-- Cancelled -> Cancelled
UPDATE type::table('ingestion_task')
SET
state = "Cancelled",
attempts = 0,
max_attempts = 3,
scheduled_at = IF updated_at != NONE THEN updated_at ELSE time::now() END,
locked_at = NONE,
lease_duration_secs = 300,
worker_id = NONE,
error_code = NONE,
error_message = NONE,
last_error_at = NONE,
priority = 0
WHERE state = NONE
AND status != NONE
AND status.name = "Cancelled";
-- Fallback for any remaining records missing state
UPDATE type::table('ingestion_task')
SET
state = "Pending",
attempts = 0,
max_attempts = 3,
scheduled_at = IF updated_at != NONE THEN updated_at ELSE time::now() END,
locked_at = NONE,
lease_duration_secs = 300,
worker_id = NONE,
error_code = NONE,
error_message = NONE,
last_error_at = NONE,
priority = 0
WHERE state = NONE;
} END;
-- Ensure defaults for newly added fields
UPDATE type::table('ingestion_task')
SET max_attempts = 3
WHERE max_attempts = NONE;
UPDATE type::table('ingestion_task')
SET lease_duration_secs = 300
WHERE lease_duration_secs = NONE;
UPDATE type::table('ingestion_task')
SET attempts = 0
WHERE attempts = NONE;
UPDATE type::table('ingestion_task')
SET priority = 0
WHERE priority = NONE;
UPDATE type::table('ingestion_task')
SET scheduled_at = IF updated_at != NONE THEN updated_at ELSE time::now() END
WHERE scheduled_at = NONE;
UPDATE type::table('ingestion_task')
SET locked_at = NONE
WHERE locked_at = NONE;
UPDATE type::table('ingestion_task')
SET worker_id = NONE
WHERE worker_id != NONE AND worker_id = "";
UPDATE type::table('ingestion_task')
SET error_code = NONE
WHERE error_code = NONE;
UPDATE type::table('ingestion_task')
SET error_message = NONE
WHERE error_message = NONE;
UPDATE type::table('ingestion_task')
SET last_error_at = NONE
WHERE last_error_at = NONE;
UPDATE type::table('ingestion_task')
SET status = NONE
WHERE status != NONE;
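To illustrate what the new fields enable (a sketch only, not part of the migration; the real claim query lives in the ingestion pipeline and may differ), a worker could pick the next due task and mark it as processing roughly as follows, with $worker_id as an assumed bind parameter:

const CLAIM_NEXT_TASK_SKETCH: &str = r#"
    LET $next = (
        SELECT VALUE id FROM ingestion_task
        WHERE state = "Pending" AND scheduled_at <= time::now()
        ORDER BY priority DESC, scheduled_at ASC
        LIMIT 1
    )[0];
    UPDATE $next SET
        state = "Processing",
        attempts += 1,
        locked_at = time::now(),
        worker_id = $worker_id;
"#;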


@@ -0,0 +1,24 @@
-- Add scratchpad table and schema
-- Define scratchpad table and schema
DEFINE TABLE IF NOT EXISTS scratchpad SCHEMALESS;
-- Standard fields from stored_object! macro
DEFINE FIELD IF NOT EXISTS created_at ON scratchpad TYPE datetime;
DEFINE FIELD IF NOT EXISTS updated_at ON scratchpad TYPE datetime;
-- Custom fields from the Scratchpad struct
DEFINE FIELD IF NOT EXISTS user_id ON scratchpad TYPE string;
DEFINE FIELD IF NOT EXISTS title ON scratchpad TYPE string;
DEFINE FIELD IF NOT EXISTS content ON scratchpad TYPE string;
DEFINE FIELD IF NOT EXISTS last_saved_at ON scratchpad TYPE datetime;
DEFINE FIELD IF NOT EXISTS is_dirty ON scratchpad TYPE bool DEFAULT false;
DEFINE FIELD IF NOT EXISTS is_archived ON scratchpad TYPE bool DEFAULT false;
DEFINE FIELD IF NOT EXISTS archived_at ON scratchpad TYPE option<datetime>;
DEFINE FIELD IF NOT EXISTS ingested_at ON scratchpad TYPE option<datetime>;
-- Indexes based on query patterns
DEFINE INDEX IF NOT EXISTS scratchpad_user_idx ON scratchpad FIELDS user_id;
DEFINE INDEX IF NOT EXISTS scratchpad_user_archived_idx ON scratchpad FIELDS user_id, is_archived;
DEFINE INDEX IF NOT EXISTS scratchpad_updated_idx ON scratchpad FIELDS updated_at;
DEFINE INDEX IF NOT EXISTS scratchpad_archived_idx ON scratchpad FIELDS archived_at;
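As a hedged example of the query patterns these indexes target (not part of the migration), listing a user's active scratchpads would be served by scratchpad_user_archived_idx; $user_id is an assumed bind parameter:

const ACTIVE_SCRATCHPADS_SKETCH: &str = r#"
    SELECT * FROM scratchpad
    WHERE user_id = $user_id AND is_archived = false
    ORDER BY updated_at DESC;
"#;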


@@ -0,0 +1 @@
{"schemas":"--- original\n+++ modified\n@@ -137,6 +137,30 @@\n DEFINE INDEX IF NOT EXISTS relates_to_metadata_source_id_idx ON relates_to FIELDS metadata.source_id;\n DEFINE INDEX IF NOT EXISTS relates_to_metadata_user_id_idx ON relates_to FIELDS metadata.user_id;\n\n+# Defines the schema for the 'scratchpad' table.\n+\n+DEFINE TABLE IF NOT EXISTS scratchpad SCHEMALESS;\n+\n+# Standard fields from stored_object! macro\n+DEFINE FIELD IF NOT EXISTS created_at ON scratchpad TYPE datetime;\n+DEFINE FIELD IF NOT EXISTS updated_at ON scratchpad TYPE datetime;\n+\n+# Custom fields from the Scratchpad struct\n+DEFINE FIELD IF NOT EXISTS user_id ON scratchpad TYPE string;\n+DEFINE FIELD IF NOT EXISTS title ON scratchpad TYPE string;\n+DEFINE FIELD IF NOT EXISTS content ON scratchpad TYPE string;\n+DEFINE FIELD IF NOT EXISTS last_saved_at ON scratchpad TYPE datetime;\n+DEFINE FIELD IF NOT EXISTS is_dirty ON scratchpad TYPE bool DEFAULT false;\n+DEFINE FIELD IF NOT EXISTS is_archived ON scratchpad TYPE bool DEFAULT false;\n+DEFINE FIELD IF NOT EXISTS archived_at ON scratchpad TYPE option<datetime>;\n+DEFINE FIELD IF NOT EXISTS ingested_at ON scratchpad TYPE option<datetime>;\n+\n+# Indexes based on query patterns\n+DEFINE INDEX IF NOT EXISTS scratchpad_user_idx ON scratchpad FIELDS user_id;\n+DEFINE INDEX IF NOT EXISTS scratchpad_user_archived_idx ON scratchpad FIELDS user_id, is_archived;\n+DEFINE INDEX IF NOT EXISTS scratchpad_updated_idx ON scratchpad FIELDS updated_at;\n+DEFINE INDEX IF NOT EXISTS scratchpad_archived_idx ON scratchpad FIELDS archived_at;\n+\n DEFINE TABLE OVERWRITE script_migration SCHEMAFULL\n PERMISSIONS\n FOR select FULL\n","events":null}


@@ -0,0 +1,23 @@
# Defines the schema for the 'scratchpad' table.
DEFINE TABLE IF NOT EXISTS scratchpad SCHEMALESS;
# Standard fields from stored_object! macro
DEFINE FIELD IF NOT EXISTS created_at ON scratchpad TYPE datetime;
DEFINE FIELD IF NOT EXISTS updated_at ON scratchpad TYPE datetime;
# Custom fields from the Scratchpad struct
DEFINE FIELD IF NOT EXISTS user_id ON scratchpad TYPE string;
DEFINE FIELD IF NOT EXISTS title ON scratchpad TYPE string;
DEFINE FIELD IF NOT EXISTS content ON scratchpad TYPE string;
DEFINE FIELD IF NOT EXISTS last_saved_at ON scratchpad TYPE datetime;
DEFINE FIELD IF NOT EXISTS is_dirty ON scratchpad TYPE bool DEFAULT false;
DEFINE FIELD IF NOT EXISTS is_archived ON scratchpad TYPE bool DEFAULT false;
DEFINE FIELD IF NOT EXISTS archived_at ON scratchpad TYPE option<datetime>;
DEFINE FIELD IF NOT EXISTS ingested_at ON scratchpad TYPE option<datetime>;
# Indexes based on query patterns
DEFINE INDEX IF NOT EXISTS scratchpad_user_idx ON scratchpad FIELDS user_id;
DEFINE INDEX IF NOT EXISTS scratchpad_user_archived_idx ON scratchpad FIELDS user_id, is_archived;
DEFINE INDEX IF NOT EXISTS scratchpad_updated_idx ON scratchpad FIELDS updated_at;
DEFINE INDEX IF NOT EXISTS scratchpad_archived_idx ON scratchpad FIELDS archived_at;


@@ -80,15 +80,18 @@ impl SurrealDbClient {
/// Operation to rebuild indexes
pub async fn rebuild_indexes(&self) -> Result<(), Error> {
debug!("Rebuilding indexes");
self.client
.query("REBUILD INDEX IF EXISTS idx_embedding_chunks ON text_chunk")
.await?;
self.client
.query("REBUILD INDEX IF EXISTS idx_embedding_entities ON knowledge_entity")
.await?;
self.client
.query("REBUILD INDEX IF EXISTS text_content_fts_idx ON text_content")
.await?;
let rebuild_sql = r#"
BEGIN TRANSACTION;
REBUILD INDEX IF EXISTS idx_embedding_chunks ON text_chunk;
REBUILD INDEX IF EXISTS idx_embedding_entities ON knowledge_entity;
REBUILD INDEX IF EXISTS text_content_fts_idx ON text_content;
REBUILD INDEX IF EXISTS knowledge_entity_fts_name_idx ON knowledge_entity;
REBUILD INDEX IF EXISTS knowledge_entity_fts_description_idx ON knowledge_entity;
REBUILD INDEX IF EXISTS text_chunk_fts_chunk_idx ON text_chunk;
COMMIT TRANSACTION;
"#;
self.client.query(rebuild_sql).await?;
Ok(())
}
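A minimal caller-side sketch (assumed, not from this diff) of where the batched rebuild fits: run it once migrations have been applied, inside an async setup path where db is a SurrealDbClient.

    // Apply pending migrations, then rebuild the affected indexes in one pass.
    db.apply_migrations().await?;
    db.rebuild_indexes().await?;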


@@ -1,4 +1,5 @@
use std::path::{Path, PathBuf};
use std::io::ErrorKind;
use std::path::{Component, Path, PathBuf};
use std::sync::Arc;
use anyhow::{anyhow, Result as AnyResult};
@@ -6,36 +7,421 @@ use bytes::Bytes;
use futures::stream::BoxStream;
use futures::{StreamExt, TryStreamExt};
use object_store::local::LocalFileSystem;
use object_store::memory::InMemory;
use object_store::{path::Path as ObjPath, ObjectStore};
use crate::utils::config::{AppConfig, StorageKind};
pub type DynStore = Arc<dyn ObjectStore>;
/// Build an object store instance anchored at the given filesystem `prefix`.
/// Storage manager with persistent state and proper lifecycle management.
#[derive(Clone)]
pub struct StorageManager {
store: DynStore,
backend_kind: StorageKind,
local_base: Option<PathBuf>,
}
impl StorageManager {
/// Create a new StorageManager with the specified configuration.
///
/// This method validates the configuration and creates the appropriate
/// storage backend with proper initialization.
pub async fn new(cfg: &AppConfig) -> object_store::Result<Self> {
let backend_kind = cfg.storage.clone();
let (store, local_base) = create_storage_backend(cfg).await?;
Ok(Self {
store,
backend_kind,
local_base,
})
}
/// Create a StorageManager with a custom storage backend.
///
/// This method is useful for testing scenarios where you want to inject
/// a specific storage backend.
pub fn with_backend(store: DynStore, backend_kind: StorageKind) -> Self {
Self {
store,
backend_kind,
local_base: None,
}
}
/// Get the storage backend kind.
pub fn backend_kind(&self) -> &StorageKind {
&self.backend_kind
}
/// Access the resolved local base directory when using the local backend.
pub fn local_base_path(&self) -> Option<&Path> {
self.local_base.as_deref()
}
/// Resolve an object location to a filesystem path when using the local backend.
///
/// Returns `None` when the backend is not local or when the provided location includes
/// unsupported components (absolute paths or parent traversals).
pub fn resolve_local_path(&self, location: &str) -> Option<PathBuf> {
let base = self.local_base_path()?;
let relative = Path::new(location);
if relative.is_absolute()
|| relative
.components()
.any(|component| matches!(component, Component::ParentDir | Component::Prefix(_)))
{
return None;
}
Some(base.join(relative))
}
/// Store bytes at the specified location.
///
/// This operation persists data using the underlying storage backend.
/// For memory backends, data persists for the lifetime of the StorageManager.
pub async fn put(&self, location: &str, data: Bytes) -> object_store::Result<()> {
let path = ObjPath::from(location);
let payload = object_store::PutPayload::from_bytes(data);
self.store.put(&path, payload).await.map(|_| ())
}
/// Retrieve bytes from the specified location.
///
/// Returns the full contents buffered in memory.
pub async fn get(&self, location: &str) -> object_store::Result<Bytes> {
let path = ObjPath::from(location);
let result = self.store.get(&path).await?;
result.bytes().await
}
/// Get a streaming handle for large objects.
///
/// Returns a fallible stream of Bytes chunks suitable for large file processing.
pub async fn get_stream(
&self,
location: &str,
) -> object_store::Result<BoxStream<'static, object_store::Result<Bytes>>> {
let path = ObjPath::from(location);
let result = self.store.get(&path).await?;
Ok(result.into_stream())
}
/// Delete all objects below the specified prefix.
///
/// For local filesystem backends, this also attempts to clean up empty directories.
pub async fn delete_prefix(&self, prefix: &str) -> object_store::Result<()> {
let prefix_path = ObjPath::from(prefix);
let locations = self
.store
.list(Some(&prefix_path))
.map_ok(|m| m.location)
.boxed();
self.store
.delete_stream(locations)
.try_collect::<Vec<_>>()
.await?;
// Cleanup filesystem directories only for local backend
if matches!(self.backend_kind, StorageKind::Local) {
self.cleanup_filesystem_directories(prefix).await?;
}
Ok(())
}
/// List all objects below the specified prefix.
pub async fn list(
&self,
prefix: Option<&str>,
) -> object_store::Result<Vec<object_store::ObjectMeta>> {
let prefix_path = prefix.map(ObjPath::from);
self.store.list(prefix_path.as_ref()).try_collect().await
}
/// Check if an object exists at the specified location.
pub async fn exists(&self, location: &str) -> object_store::Result<bool> {
let path = ObjPath::from(location);
self.store
.head(&path)
.await
.map(|_| true)
.or_else(|e| match e {
object_store::Error::NotFound { .. } => Ok(false),
_ => Err(e),
})
}
/// Cleanup filesystem directories for local backend.
///
/// This is a best-effort cleanup and ignores errors.
async fn cleanup_filesystem_directories(&self, prefix: &str) -> object_store::Result<()> {
if !matches!(self.backend_kind, StorageKind::Local) {
return Ok(());
}
let Some(base) = &self.local_base else {
return Ok(());
};
let relative = Path::new(prefix);
if relative.is_absolute()
|| relative
.components()
.any(|component| matches!(component, Component::ParentDir | Component::Prefix(_)))
{
tracing::warn!(
prefix = %prefix,
"Skipping directory cleanup for unsupported prefix components"
);
return Ok(());
}
let mut current = base.join(relative);
while current.starts_with(base) && current.as_path() != base.as_path() {
match tokio::fs::remove_dir(&current).await {
Ok(_) => {}
Err(err) => match err.kind() {
ErrorKind::NotFound => {}
ErrorKind::DirectoryNotEmpty => break,
_ => tracing::debug!(
error = %err,
path = %current.display(),
"Failed to remove directory during cleanup"
),
},
}
if let Some(parent) = current.parent() {
current = parent.to_path_buf();
} else {
break;
}
}
Ok(())
}
}
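A minimal end-to-end usage sketch of the new manager (caller-side code, assumed; error handling reduced to ? for brevity):

async fn storage_roundtrip(cfg: &AppConfig) -> object_store::Result<()> {
    // Build the backend from configuration (local or memory, per cfg.storage).
    let storage = StorageManager::new(cfg).await?;
    // Write an object at a logical location, read it back, and verify it exists.
    storage.put("user1/abc/file.txt", bytes::Bytes::from_static(b"hello")).await?;
    let data = storage.get("user1/abc/file.txt").await?;
    assert_eq!(data.as_ref(), b"hello");
    assert!(storage.exists("user1/abc/file.txt").await?);
    // Remove everything under the prefix (and, for local storage, empty directories).
    storage.delete_prefix("user1/abc/").await?;
    Ok(())
}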
/// Create a storage backend based on configuration.
///
/// - For the `Local` backend, `prefix` is the absolute directory on disk that
/// serves as the root for all object paths passed to the store.
/// - `prefix` need not exist beforehand; this function will create it if missing.
///
/// Example (Local):
/// - prefix: `/var/data`
/// - object location: `user/uuid/file.txt`
/// - absolute path: `/var/data/user/uuid/file.txt`
pub async fn build_store(prefix: &Path, cfg: &AppConfig) -> object_store::Result<DynStore> {
/// This factory function handles the creation and initialization of different
/// storage backends with proper error handling and validation.
async fn create_storage_backend(
cfg: &AppConfig,
) -> object_store::Result<(DynStore, Option<PathBuf>)> {
match cfg.storage {
StorageKind::Local => {
if !prefix.exists() {
tokio::fs::create_dir_all(prefix).await.map_err(|e| {
let base = resolve_base_dir(cfg);
if !base.exists() {
tokio::fs::create_dir_all(&base).await.map_err(|e| {
object_store::Error::Generic {
store: "LocalFileSystem",
source: e.into(),
}
})?;
}
let store = LocalFileSystem::new_with_prefix(prefix)?;
Ok(Arc::new(store))
let store = LocalFileSystem::new_with_prefix(base.clone())?;
Ok((Arc::new(store), Some(base)))
}
StorageKind::Memory => {
let store = InMemory::new();
Ok((Arc::new(store), None))
}
}
}
/// Testing utilities for storage operations.
///
/// This module provides specialized utilities for testing scenarios with
/// automatic memory backend setup and proper test isolation.
#[cfg(test)]
pub mod testing {
use super::*;
use crate::utils::config::{AppConfig, PdfIngestMode};
use uuid;
/// Create a test configuration with memory storage.
///
/// This provides a ready-to-use configuration for testing scenarios
/// that don't require filesystem persistence.
pub fn test_config_memory() -> AppConfig {
AppConfig {
openai_api_key: "test".into(),
surrealdb_address: "test".into(),
surrealdb_username: "test".into(),
surrealdb_password: "test".into(),
surrealdb_namespace: "test".into(),
surrealdb_database: "test".into(),
data_dir: "/tmp/unused".into(), // Ignored for memory storage
http_port: 0,
openai_base_url: "..".into(),
storage: StorageKind::Memory,
pdf_ingest_mode: PdfIngestMode::LlmFirst,
..Default::default()
}
}
/// Create a test configuration with local storage.
///
/// This provides a ready-to-use configuration for testing scenarios
/// that require actual filesystem operations.
pub fn test_config_local() -> AppConfig {
let base = format!("/tmp/minne_test_storage_{}", uuid::Uuid::new_v4());
AppConfig {
openai_api_key: "test".into(),
surrealdb_address: "test".into(),
surrealdb_username: "test".into(),
surrealdb_password: "test".into(),
surrealdb_namespace: "test".into(),
surrealdb_database: "test".into(),
data_dir: base.into(),
http_port: 0,
openai_base_url: "..".into(),
storage: StorageKind::Local,
pdf_ingest_mode: PdfIngestMode::LlmFirst,
..Default::default()
}
}
/// A specialized StorageManager for testing scenarios.
///
/// This provides automatic setup for memory storage with proper isolation
/// and cleanup capabilities for test environments.
#[derive(Clone)]
pub struct TestStorageManager {
storage: StorageManager,
_temp_dir: Option<(String, std::path::PathBuf)>, // For local storage cleanup
}
impl TestStorageManager {
/// Create a new TestStorageManager with memory backend.
///
/// This is the preferred method for unit tests as it provides
/// fast execution and complete isolation.
pub async fn new_memory() -> object_store::Result<Self> {
let cfg = test_config_memory();
let storage = StorageManager::new(&cfg).await?;
Ok(Self {
storage,
_temp_dir: None,
})
}
/// Create a new TestStorageManager with local filesystem backend.
///
/// This method creates a temporary directory that will be automatically
/// cleaned up when the TestStorageManager is dropped.
pub async fn new_local() -> object_store::Result<Self> {
let cfg = test_config_local();
let storage = StorageManager::new(&cfg).await?;
let resolved = storage
.local_base_path()
.map(|path| (cfg.data_dir.clone(), path.to_path_buf()));
Ok(Self {
storage,
_temp_dir: resolved,
})
}
/// Create a TestStorageManager with custom configuration.
pub async fn with_config(cfg: &AppConfig) -> object_store::Result<Self> {
let storage = StorageManager::new(cfg).await?;
let temp_dir = if matches!(cfg.storage, StorageKind::Local) {
storage
.local_base_path()
.map(|path| (cfg.data_dir.clone(), path.to_path_buf()))
} else {
None
};
Ok(Self {
storage,
_temp_dir: temp_dir,
})
}
/// Get a reference to the underlying StorageManager.
pub fn storage(&self) -> &StorageManager {
&self.storage
}
/// Clone the underlying StorageManager.
pub fn clone_storage(&self) -> StorageManager {
self.storage.clone()
}
/// Store test data at the specified location.
pub async fn put(&self, location: &str, data: &[u8]) -> object_store::Result<()> {
self.storage.put(location, Bytes::from(data.to_vec())).await
}
/// Retrieve test data from the specified location.
pub async fn get(&self, location: &str) -> object_store::Result<Bytes> {
self.storage.get(location).await
}
/// Delete test data below the specified prefix.
pub async fn delete_prefix(&self, prefix: &str) -> object_store::Result<()> {
self.storage.delete_prefix(prefix).await
}
/// Check if test data exists at the specified location.
pub async fn exists(&self, location: &str) -> object_store::Result<bool> {
self.storage.exists(location).await
}
/// List all test objects below the specified prefix.
pub async fn list(
&self,
prefix: Option<&str>,
) -> object_store::Result<Vec<object_store::ObjectMeta>> {
self.storage.list(prefix).await
}
}
impl Drop for TestStorageManager {
fn drop(&mut self) {
// Clean up temporary directories for local storage
if let Some((_, path)) = &self._temp_dir {
if path.exists() {
let _ = std::fs::remove_dir_all(path);
}
}
}
}
/// Convenience macro for creating memory storage tests.
///
/// This macro simplifies the creation of test storage with memory backend.
#[macro_export]
macro_rules! test_storage_memory {
() => {{
async move {
$crate::storage::store::testing::TestStorageManager::new_memory()
.await
.expect("Failed to create test memory storage")
}
}};
}
/// Convenience macro for creating local storage tests.
///
/// This macro simplifies the creation of test storage with local filesystem backend.
#[macro_export]
macro_rules! test_storage_local {
() => {{
async move {
$crate::storage::store::testing::TestStorageManager::new_local()
.await
.expect("Failed to create test local storage")
}
}};
}
}
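A hypothetical test using the convenience macro (equivalent to calling TestStorageManager::new_memory() directly):

#[tokio::test]
async fn demo_memory_storage() {
    // The macro yields an async block; awaiting it produces a TestStorageManager.
    let storage = crate::test_storage_memory!().await;
    storage.put("demo/file.txt", b"bytes").await.expect("put");
    assert!(storage.exists("demo/file.txt").await.expect("exists"));
}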
@@ -52,124 +438,6 @@ pub fn resolve_base_dir(cfg: &AppConfig) -> PathBuf {
}
}
/// Build an object store rooted at the configured data directory.
///
/// This is the recommended way to obtain a store for logical object operations
/// such as `put_bytes_at`, `get_bytes_at`, and `delete_prefix_at`.
pub async fn build_store_root(cfg: &AppConfig) -> object_store::Result<DynStore> {
let base = resolve_base_dir(cfg);
build_store(&base, cfg).await
}
/// Write bytes to `file_name` within a filesystem `prefix` using the configured store.
///
/// Prefer [`put_bytes_at`] for location-based writes that do not need to compute
/// a separate filesystem prefix.
pub async fn put_bytes(
prefix: &Path,
file_name: &str,
data: Bytes,
cfg: &AppConfig,
) -> object_store::Result<()> {
let store = build_store(prefix, cfg).await?;
let payload = object_store::PutPayload::from_bytes(data);
store.put(&ObjPath::from(file_name), payload).await?;
Ok(())
}
/// Write bytes to the provided logical object `location`, e.g. `"user/uuid/file"`.
///
/// The store root is taken from `AppConfig::data_dir` for the local backend.
/// This performs an atomic write as guaranteed by `object_store`.
pub async fn put_bytes_at(
location: &str,
data: Bytes,
cfg: &AppConfig,
) -> object_store::Result<()> {
let store = build_store_root(cfg).await?;
let payload = object_store::PutPayload::from_bytes(data);
store.put(&ObjPath::from(location), payload).await?;
Ok(())
}
/// Read bytes from `file_name` within a filesystem `prefix` using the configured store.
///
/// Prefer [`get_bytes_at`] for location-based reads.
pub async fn get_bytes(
prefix: &Path,
file_name: &str,
cfg: &AppConfig,
) -> object_store::Result<Bytes> {
let store = build_store(prefix, cfg).await?;
let r = store.get(&ObjPath::from(file_name)).await?;
let b = r.bytes().await?;
Ok(b)
}
/// Read bytes from the provided logical object `location`.
///
/// Returns the full contents buffered in memory.
pub async fn get_bytes_at(location: &str, cfg: &AppConfig) -> object_store::Result<Bytes> {
let store = build_store_root(cfg).await?;
let r = store.get(&ObjPath::from(location)).await?;
r.bytes().await
}
/// Get a streaming body for the provided logical object `location`.
///
/// Returns a fallible `BoxStream` of `Bytes`, suitable for use with
/// `axum::body::Body::from_stream` to stream responses without buffering.
pub async fn get_stream_at(
location: &str,
cfg: &AppConfig,
) -> object_store::Result<BoxStream<'static, object_store::Result<Bytes>>> {
let store = build_store_root(cfg).await?;
let r = store.get(&ObjPath::from(location)).await?;
Ok(r.into_stream())
}
/// Delete all objects below the provided filesystem `prefix`.
///
/// This is a low-level variant for when a dedicated on-disk prefix is used for a
/// particular object grouping. Prefer [`delete_prefix_at`] for location-based stores.
pub async fn delete_prefix(prefix: &Path, cfg: &AppConfig) -> object_store::Result<()> {
let store = build_store(prefix, cfg).await?;
// list everything and delete
let locations = store.list(None).map_ok(|m| m.location).boxed();
store
.delete_stream(locations)
.try_collect::<Vec<_>>()
.await?;
// Best effort remove the directory itself
if tokio::fs::try_exists(prefix).await.unwrap_or(false) {
let _ = tokio::fs::remove_dir_all(prefix).await;
}
Ok(())
}
/// Delete all objects below the provided logical object `prefix`, e.g. `"user/uuid/"`.
///
/// After deleting, attempts a best-effort cleanup of the now-empty directory on disk
/// when using the local backend.
pub async fn delete_prefix_at(prefix: &str, cfg: &AppConfig) -> object_store::Result<()> {
let store = build_store_root(cfg).await?;
let prefix_path = ObjPath::from(prefix);
let locations = store
.list(Some(&prefix_path))
.map_ok(|m| m.location)
.boxed();
store
.delete_stream(locations)
.try_collect::<Vec<_>>()
.await?;
// Best effort remove empty directory on disk for local storage
let base_dir = resolve_base_dir(cfg).join(prefix);
if tokio::fs::try_exists(&base_dir).await.unwrap_or(false) {
let _ = tokio::fs::remove_dir_all(&base_dir).await;
}
Ok(())
}
/// Split an absolute filesystem path into `(parent_dir, file_name)`.
pub fn split_abs_path(path: &str) -> AnyResult<(PathBuf, String)> {
let pb = PathBuf::from(path);
@@ -198,7 +466,6 @@ mod tests {
use super::*;
use crate::utils::config::{PdfIngestMode::LlmFirst, StorageKind};
use bytes::Bytes;
use futures::TryStreamExt;
use uuid::Uuid;
fn test_config(root: &str) -> AppConfig {
@@ -214,71 +481,357 @@ mod tests {
openai_base_url: "..".into(),
storage: StorageKind::Local,
pdf_ingest_mode: LlmFirst,
..Default::default()
}
}
fn test_config_memory() -> AppConfig {
AppConfig {
openai_api_key: "test".into(),
surrealdb_address: "test".into(),
surrealdb_username: "test".into(),
surrealdb_password: "test".into(),
surrealdb_namespace: "test".into(),
surrealdb_database: "test".into(),
data_dir: "/tmp/unused".into(), // Ignored for memory storage
http_port: 0,
openai_base_url: "..".into(),
storage: StorageKind::Memory,
pdf_ingest_mode: LlmFirst,
..Default::default()
}
}
#[tokio::test]
async fn test_build_store_root_creates_base() {
let base = format!("/tmp/minne_store_test_{}", Uuid::new_v4());
async fn test_storage_manager_memory_basic_operations() {
let cfg = test_config_memory();
let storage = StorageManager::new(&cfg)
.await
.expect("create storage manager");
assert!(storage.local_base_path().is_none());
let location = "test/data/file.txt";
let data = b"test data for storage manager";
// Test put and get
storage
.put(location, Bytes::from(data.to_vec()))
.await
.expect("put");
let retrieved = storage.get(location).await.expect("get");
assert_eq!(retrieved.as_ref(), data);
// Test exists
assert!(storage.exists(location).await.expect("exists check"));
// Test delete
storage.delete_prefix("test/data/").await.expect("delete");
assert!(!storage
.exists(location)
.await
.expect("exists check after delete"));
}
#[tokio::test]
async fn test_storage_manager_local_basic_operations() {
let base = format!("/tmp/minne_storage_test_{}", Uuid::new_v4());
let cfg = test_config(&base);
let _ = build_store_root(&cfg).await.expect("build store root");
assert!(tokio::fs::try_exists(&base).await.unwrap_or(false));
let storage = StorageManager::new(&cfg)
.await
.expect("create storage manager");
let resolved_base = storage
.local_base_path()
.expect("resolved base dir")
.to_path_buf();
assert_eq!(resolved_base, PathBuf::from(&base));
let location = "test/data/file.txt";
let data = b"test data for local storage";
// Test put and get
storage
.put(location, Bytes::from(data.to_vec()))
.await
.expect("put");
let retrieved = storage.get(location).await.expect("get");
assert_eq!(retrieved.as_ref(), data);
let object_dir = resolved_base.join("test/data");
tokio::fs::metadata(&object_dir)
.await
.expect("object directory exists after write");
// Test exists
assert!(storage.exists(location).await.expect("exists check"));
// Test delete
storage.delete_prefix("test/data/").await.expect("delete");
assert!(!storage
.exists(location)
.await
.expect("exists check after delete"));
assert!(
tokio::fs::metadata(&object_dir).await.is_err(),
"object directory should be removed"
);
tokio::fs::metadata(&resolved_base)
.await
.expect("base directory remains intact");
// Clean up
let _ = tokio::fs::remove_dir_all(&base).await;
}
#[tokio::test]
async fn test_put_get_bytes_at_and_delete_prefix_at() {
let base = format!("/tmp/minne_store_test_{}", Uuid::new_v4());
let cfg = test_config(&base);
let location_prefix = format!("{}/{}", "user1", Uuid::new_v4());
let file_name = "file.txt";
let location = format!("{}/{}", &location_prefix, file_name);
let payload = Bytes::from_static(b"hello world");
put_bytes_at(&location, payload.clone(), &cfg)
async fn test_storage_manager_memory_persistence() {
let cfg = test_config_memory();
let storage = StorageManager::new(&cfg)
.await
.expect("put");
let got = get_bytes_at(&location, &cfg).await.expect("get");
assert_eq!(got.as_ref(), payload.as_ref());
.expect("create storage manager");
// Delete the whole prefix and ensure retrieval fails
delete_prefix_at(&location_prefix, &cfg)
let location = "persistence/test.txt";
let data1 = b"first data";
let data2 = b"second data";
// Put first data
storage
.put(location, Bytes::from(data1.to_vec()))
.await
.expect("delete prefix");
assert!(get_bytes_at(&location, &cfg).await.is_err());
.expect("put first");
let _ = tokio::fs::remove_dir_all(&base).await;
// Retrieve and verify first data
let retrieved1 = storage.get(location).await.expect("get first");
assert_eq!(retrieved1.as_ref(), data1);
// Overwrite with second data
storage
.put(location, Bytes::from(data2.to_vec()))
.await
.expect("put second");
// Retrieve and verify second data
let retrieved2 = storage.get(location).await.expect("get second");
assert_eq!(retrieved2.as_ref(), data2);
// Data persists across multiple operations using the same StorageManager
assert_ne!(retrieved1.as_ref(), retrieved2.as_ref());
}
#[tokio::test]
async fn test_get_stream_at() {
let base = format!("/tmp/minne_store_test_{}", Uuid::new_v4());
let cfg = test_config(&base);
async fn test_storage_manager_list_operations() {
let cfg = test_config_memory();
let storage = StorageManager::new(&cfg)
.await
.expect("create storage manager");
let location = format!("{}/{}/stream.bin", "user2", Uuid::new_v4());
let content = vec![7u8; 32 * 1024]; // 32KB payload
// Create multiple files
let files = vec![
("dir1/file1.txt", b"content1"),
("dir1/file2.txt", b"content2"),
("dir2/file3.txt", b"content3"),
];
put_bytes_at(&location, Bytes::from(content.clone()), &cfg)
for (location, data) in &files {
storage
.put(location, Bytes::from(data.to_vec()))
.await
.expect("put");
}
// Test listing without prefix
let all_files = storage.list(None).await.expect("list all");
assert_eq!(all_files.len(), 3);
// Test listing with prefix
let dir1_files = storage.list(Some("dir1/")).await.expect("list dir1");
assert_eq!(dir1_files.len(), 2);
assert!(dir1_files
.iter()
.any(|meta| meta.location.as_ref().contains("file1.txt")));
assert!(dir1_files
.iter()
.any(|meta| meta.location.as_ref().contains("file2.txt")));
// Test listing non-existent prefix
let empty_files = storage
.list(Some("nonexistent/"))
.await
.expect("list nonexistent");
assert_eq!(empty_files.len(), 0);
}
#[tokio::test]
async fn test_storage_manager_stream_operations() {
let cfg = test_config_memory();
let storage = StorageManager::new(&cfg)
.await
.expect("create storage manager");
let location = "stream/test.bin";
let content = vec![42u8; 1024 * 64]; // 64KB of data
// Put large data
storage
.put(location, Bytes::from(content.clone()))
.await
.expect("put large data");
// Get as stream
let mut stream = storage.get_stream(location).await.expect("get stream");
let mut collected = Vec::new();
while let Some(chunk) = stream.next().await {
let chunk = chunk.expect("stream chunk");
collected.extend_from_slice(&chunk);
}
assert_eq!(collected, content);
}
#[tokio::test]
async fn test_storage_manager_with_custom_backend() {
use object_store::memory::InMemory;
// Create custom memory backend
let custom_store = InMemory::new();
let storage = StorageManager::with_backend(Arc::new(custom_store), StorageKind::Memory);
let location = "custom/test.txt";
let data = b"custom backend test";
// Test operations with custom backend
storage
.put(location, Bytes::from(data.to_vec()))
.await
.expect("put");
let retrieved = storage.get(location).await.expect("get");
assert_eq!(retrieved.as_ref(), data);
let stream = get_stream_at(&location, &cfg).await.expect("stream");
let combined: Vec<u8> = stream
.map_ok(|chunk| chunk.to_vec())
.try_fold(Vec::new(), |mut acc, mut chunk| async move {
acc.append(&mut chunk);
Ok(acc)
})
assert!(storage.exists(location).await.expect("exists"));
assert_eq!(*storage.backend_kind(), StorageKind::Memory);
}
#[tokio::test]
async fn test_storage_manager_error_handling() {
let cfg = test_config_memory();
let storage = StorageManager::new(&cfg)
.await
.expect("collect");
.expect("create storage manager");
assert_eq!(combined, content);
// Test getting non-existent file
let result = storage.get("nonexistent.txt").await;
assert!(result.is_err());
delete_prefix_at(&split_object_path(&location).unwrap().0, &cfg)
// Test checking existence of non-existent file
let exists = storage
.exists("nonexistent.txt")
.await
.ok();
.expect("exists check");
assert!(!exists);
let _ = tokio::fs::remove_dir_all(&base).await;
// Test getting an empty location (should not panic)
let _result = storage.get("").await;
// This may or may not error depending on the backend implementation
// The important thing is that it doesn't panic
}
// TestStorageManager tests
#[tokio::test]
async fn test_test_storage_manager_memory() {
let test_storage = testing::TestStorageManager::new_memory()
.await
.expect("create test storage");
let location = "test/storage/file.txt";
let data = b"test data with TestStorageManager";
// Test put and get
test_storage.put(location, data).await.expect("put");
let retrieved = test_storage.get(location).await.expect("get");
assert_eq!(retrieved.as_ref(), data);
// Test existence check
assert!(test_storage.exists(location).await.expect("exists"));
// Test list
let files = test_storage
.list(Some("test/storage/"))
.await
.expect("list");
assert_eq!(files.len(), 1);
// Test delete
test_storage
.delete_prefix("test/storage/")
.await
.expect("delete");
assert!(!test_storage
.exists(location)
.await
.expect("exists after delete"));
}
#[tokio::test]
async fn test_test_storage_manager_local() {
let test_storage = testing::TestStorageManager::new_local()
.await
.expect("create test storage");
let location = "test/local/file.txt";
let data = b"test data with local TestStorageManager";
// Test put and get
test_storage.put(location, data).await.expect("put");
let retrieved = test_storage.get(location).await.expect("get");
assert_eq!(retrieved.as_ref(), data);
// Test existence check
assert!(test_storage.exists(location).await.expect("exists"));
// The storage should be automatically cleaned up when test_storage is dropped
}
#[tokio::test]
async fn test_test_storage_manager_isolation() {
let storage1 = testing::TestStorageManager::new_memory()
.await
.expect("create test storage 1");
let storage2 = testing::TestStorageManager::new_memory()
.await
.expect("create test storage 2");
let location = "isolation/test.txt";
let data1 = b"storage 1 data";
let data2 = b"storage 2 data";
// Put different data in each storage
storage1.put(location, data1).await.expect("put storage 1");
storage2.put(location, data2).await.expect("put storage 2");
// Verify isolation
let retrieved1 = storage1.get(location).await.expect("get storage 1");
let retrieved2 = storage2.get(location).await.expect("get storage 2");
assert_eq!(retrieved1.as_ref(), data1);
assert_eq!(retrieved2.as_ref(), data2);
assert_ne!(retrieved1.as_ref(), retrieved2.as_ref());
}
#[tokio::test]
async fn test_test_storage_manager_config() {
let cfg = testing::test_config_memory();
let test_storage = testing::TestStorageManager::with_config(&cfg)
.await
.expect("create test storage with config");
let location = "config/test.txt";
let data = b"test data with custom config";
test_storage.put(location, data).await.expect("put");
let retrieved = test_storage.get(location).await.expect("get");
assert_eq!(retrieved.as_ref(), data);
// Verify it's using memory backend
assert_eq!(*test_storage.storage().backend_kind(), StorageKind::Memory);
}
}


@@ -1,4 +1,5 @@
use axum_typed_multipart::FieldData;
use bytes;
use mime_guess::from_path;
use object_store::Error as ObjectStoreError;
use sha2::{Digest, Sha256};
@@ -13,9 +14,8 @@ use uuid::Uuid;
use crate::{
error::AppError,
storage::{db::SurrealDbClient, store},
storage::{db::SurrealDbClient, store, store::StorageManager},
stored_object,
utils::config::AppConfig,
};
#[derive(Error, Debug)]
@@ -51,54 +51,6 @@ stored_object!(FileInfo, "file", {
});
impl FileInfo {
pub async fn new(
field_data: FieldData<NamedTempFile>,
db_client: &SurrealDbClient,
user_id: &str,
config: &AppConfig,
) -> Result<Self, FileError> {
let file = field_data.contents;
let file_name = field_data
.metadata
.file_name
.ok_or(FileError::MissingFileName)?;
// Calculate SHA256
let sha256 = Self::get_sha(&file).await?;
// Early return if file already exists
match Self::get_by_sha(&sha256, db_client).await {
Ok(existing_file) => {
info!("File already exists with SHA256: {}", sha256);
return Ok(existing_file);
}
Err(FileError::FileNotFound(_)) => (), // Expected case for new files
Err(e) => return Err(e), // Propagate unexpected errors
}
// Generate UUID and prepare paths
let uuid = Uuid::new_v4();
let sanitized_file_name = Self::sanitize_file_name(&file_name);
let now = Utc::now();
// Create new FileInfo instance
let file_info = Self {
id: uuid.to_string(),
created_at: now,
updated_at: now,
file_name,
sha256,
path: Self::persist_file(&uuid, file, &sanitized_file_name, user_id, config).await?,
mime_type: Self::guess_mime_type(Path::new(&sanitized_file_name)),
user_id: user_id.to_string(),
};
// Store in database
db_client.store_item(file_info.clone()).await?;
Ok(file_info)
}
/// Guesses the MIME type based on the file extension.
///
/// # Arguments
@@ -167,36 +119,6 @@ impl FileInfo {
}
}
/// Persists the file under the logical location `{user_id}/{uuid}/{file_name}`.
///
/// # Arguments
/// * `uuid` - The UUID of the file.
/// * `file` - The temporary file to persist.
/// * `file_name` - The sanitized file name.
/// * `user_id` - The user id
/// * `config` - Application configuration containing data directory path
///
/// # Returns
/// * `Result<String, FileError>` - The logical object location or an error.
async fn persist_file(
uuid: &Uuid,
file: NamedTempFile,
file_name: &str,
user_id: &str,
config: &AppConfig,
) -> Result<String, FileError> {
// Logical object location relative to the store root
let location = format!("{}/{}/{}", user_id, uuid, file_name);
info!("Persisting to object location: {}", location);
let bytes = tokio::fs::read(file.path()).await?;
store::put_bytes_at(&location, bytes.into(), config)
.await
.map_err(FileError::from)?;
Ok(location)
}
/// Retrieves a `FileInfo` by SHA256.
///
/// # Arguments
@@ -215,41 +137,6 @@ impl FileInfo {
.ok_or(FileError::FileNotFound(sha256.to_string()))
}
/// Removes FileInfo from database and file from disk
///
/// # Arguments
/// * `id` - Id of the FileInfo
/// * `db_client` - Reference to SurrealDbClient
///
/// # Returns
/// `Result<(), FileError>`
pub async fn delete_by_id(
id: &str,
db_client: &SurrealDbClient,
config: &AppConfig,
) -> Result<(), AppError> {
// Get the FileInfo from the database
let Some(file_info) = db_client.get_item::<FileInfo>(id).await? else {
return Ok(());
};
// Remove the object's parent prefix in the object store
let (parent_prefix, _file_name) = store::split_object_path(&file_info.path)
.map_err(|e| AppError::from(anyhow::anyhow!(e)))?;
store::delete_prefix_at(&parent_prefix, config)
.await
.map_err(|e| AppError::from(anyhow::anyhow!(e)))?;
info!(
"Removed object prefix {} and its contents via object_store",
parent_prefix
);
// Delete the FileInfo from the database
db_client.delete_item::<FileInfo>(id).await?;
Ok(())
}
/// Retrieves a `FileInfo` by its ID.
///
/// # Arguments
@@ -265,15 +152,166 @@ impl FileInfo {
Err(e) => Err(FileError::SurrealError(e)),
}
}
/// Create a new FileInfo using StorageManager for persistent storage operations.
///
/// # Arguments
/// * `field_data` - The uploaded file data
/// * `db_client` - Reference to the SurrealDbClient
/// * `user_id` - The user ID
/// * `storage` - A StorageManager instance for storage operations
///
/// # Returns
/// * `Result<Self, FileError>` - The created FileInfo or an error
pub async fn new_with_storage(
field_data: FieldData<NamedTempFile>,
db_client: &SurrealDbClient,
user_id: &str,
storage: &StorageManager,
) -> Result<Self, FileError> {
let file = field_data.contents;
let file_name = field_data
.metadata
.file_name
.ok_or(FileError::MissingFileName)?;
let original_file_name = file_name.clone();
// Calculate SHA256
let sha256 = Self::get_sha(&file).await?;
// Early return if file already exists
match Self::get_by_sha(&sha256, db_client).await {
Ok(existing_file) => {
info!("File already exists with SHA256: {}", sha256);
return Ok(existing_file);
}
Err(FileError::FileNotFound(_)) => (), // Expected case for new files
Err(e) => return Err(e), // Propagate unexpected errors
}
// Generate UUID and prepare paths
let uuid = Uuid::new_v4();
let sanitized_file_name = Self::sanitize_file_name(&file_name);
let now = Utc::now();
let path =
Self::persist_file_with_storage(&uuid, file, &sanitized_file_name, user_id, storage)
.await?;
// Create FileInfo struct
let file_info = FileInfo {
id: uuid.to_string(),
user_id: user_id.to_string(),
sha256,
file_name: original_file_name,
path,
mime_type: Self::guess_mime_type(Path::new(&file_name)),
created_at: now,
updated_at: now,
};
// Store in database
db_client
.store_item(file_info.clone())
.await
.map_err(FileError::SurrealError)?;
Ok(file_info)
}
/// Delete a FileInfo by ID using StorageManager for storage operations.
///
/// # Arguments
/// * `id` - ID of the FileInfo
/// * `db_client` - Reference to SurrealDbClient
/// * `storage` - A StorageManager instance for storage operations
///
/// # Returns
/// * `Result<(), AppError>` - Success or error
pub async fn delete_by_id_with_storage(
id: &str,
db_client: &SurrealDbClient,
storage: &StorageManager,
) -> Result<(), AppError> {
// Get the FileInfo from the database
let Some(file_info) = db_client.get_item::<FileInfo>(id).await? else {
return Ok(());
};
// Remove the object's parent prefix in the object store
let (parent_prefix, _file_name) = store::split_object_path(&file_info.path)
.map_err(|e| AppError::from(anyhow::anyhow!(e)))?;
storage
.delete_prefix(&parent_prefix)
.await
.map_err(|e| AppError::from(anyhow::anyhow!(e)))?;
info!(
"Removed object prefix {} and its contents via StorageManager",
parent_prefix
);
// Delete the FileInfo from the database
db_client.delete_item::<FileInfo>(id).await?;
Ok(())
}
/// Retrieve file content using StorageManager for storage operations.
///
/// # Arguments
/// * `storage` - A StorageManager instance for storage operations
///
/// # Returns
/// * `Result<bytes::Bytes, AppError>` - The file content or an error
pub async fn get_content_with_storage(
&self,
storage: &StorageManager,
) -> Result<bytes::Bytes, AppError> {
storage
.get(&self.path)
.await
.map_err(|e: object_store::Error| AppError::from(anyhow::anyhow!(e)))
}
/// Persist file to storage using StorageManager.
///
/// # Arguments
/// * `uuid` - The UUID for the file
/// * `file` - The temporary file to persist
/// * `file_name` - The name of the file
/// * `user_id` - The user ID
/// * `storage` - A StorageManager instance for storage operations
///
/// # Returns
/// * `Result<String, FileError>` - The logical object location or an error.
async fn persist_file_with_storage(
uuid: &Uuid,
file: NamedTempFile,
file_name: &str,
user_id: &str,
storage: &StorageManager,
) -> Result<String, FileError> {
// Logical object location relative to the store root
let location = format!("{}/{}/{}", user_id, uuid, file_name);
info!("Persisting to object location: {}", location);
let bytes = tokio::fs::read(file.path()).await?;
storage
.put(&location, bytes.into())
.await
.map_err(FileError::from)?;
Ok(location)
}
}
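For orientation, a caller-side sketch (assumed bindings: file_id, db, storage; not part of this changeset) of reading a stored file back through the storage-backed API:

    // Inside an async context; panics on error for brevity.
    let file_info = FileInfo::get_by_id(&file_id, &db).await.expect("file info");
    let bytes = file_info.get_content_with_storage(&storage).await.expect("content");
    info!("loaded {} ({} bytes)", file_info.file_name, bytes.len());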
#[cfg(test)]
mod tests {
use super::*;
use crate::utils::config::{PdfIngestMode::LlmFirst, StorageKind};
use crate::storage::store::testing::TestStorageManager;
use axum::http::HeaderMap;
use axum_typed_multipart::FieldMetadata;
use std::io::Write;
use std::{io::Write, path::Path};
use tempfile::NamedTempFile;
/// Creates a test temporary file with the given content
@@ -299,45 +337,39 @@ mod tests {
}
#[tokio::test]
async fn test_fileinfo_create_read_delete() {
// Setup in-memory database for testing
async fn test_fileinfo_create_read_delete_with_storage_manager() {
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
db.apply_migrations().await.unwrap();
// Create a test file
let content = b"This is a test file for cross-filesystem operations";
let file_name = "cross_fs_test.txt";
let content = b"This is a test file for StorageManager operations";
let file_name = "storage_manager_test.txt";
let field_data = create_test_file(content, file_name);
// Create a FileInfo instance with data_dir in /tmp
// Create test storage manager (memory backend)
let test_storage = store::testing::TestStorageManager::new_memory()
.await
.expect("Failed to create test storage manager");
// Create a FileInfo instance with storage manager
let user_id = "test_user";
let config = AppConfig {
data_dir: "/tmp/minne_test_data".to_string(), // Using /tmp which is typically on a different filesystem
openai_api_key: "test_key".to_string(),
surrealdb_address: "test_address".to_string(),
surrealdb_username: "test_user".to_string(),
surrealdb_password: "test_pass".to_string(),
surrealdb_namespace: "test_ns".to_string(),
surrealdb_database: "test_db".to_string(),
http_port: 3000,
openai_base_url: "..".to_string(),
storage: StorageKind::Local,
pdf_ingest_mode: LlmFirst,
};
// Test file creation
let file_info = FileInfo::new(field_data, &db, user_id, &config)
.await
.expect("Failed to create file across filesystems");
// Test file creation with StorageManager
let file_info =
FileInfo::new_with_storage(field_data, &db, user_id, test_storage.storage())
.await
.expect("Failed to create file with StorageManager");
assert_eq!(file_info.file_name, file_name);
// Verify the file exists via object_store and has correct content
let bytes = store::get_bytes_at(&file_info.path, &config)
// Verify the file exists via StorageManager and has correct content
let bytes = file_info
.get_content_with_storage(test_storage.storage())
.await
.expect("Failed to read file content via object_store");
assert_eq!(bytes, content.as_slice());
.expect("Failed to read file content via StorageManager");
assert_eq!(bytes.as_ref(), content);
// Test file reading
let retrieved = FileInfo::get_by_id(&file_info.id, &db)
@@ -345,63 +377,89 @@ mod tests {
.expect("Failed to retrieve file info");
assert_eq!(retrieved.id, file_info.id);
assert_eq!(retrieved.sha256, file_info.sha256);
assert_eq!(retrieved.file_name, file_name);
// Test file deletion
FileInfo::delete_by_id(&file_info.id, &db, &config)
// Test file deletion with StorageManager
FileInfo::delete_by_id_with_storage(&file_info.id, &db, test_storage.storage())
.await
.expect("Failed to delete file");
assert!(
store::get_bytes_at(&file_info.path, &config).await.is_err(),
"File should be deleted"
);
.expect("Failed to delete file with StorageManager");
// Clean up the test directory
let _ = tokio::fs::remove_dir_all(&config.data_dir).await;
let deleted_result = file_info
.get_content_with_storage(test_storage.storage())
.await;
assert!(deleted_result.is_err(), "File should be deleted");
// No cleanup needed - TestStorageManager handles it automatically
}
#[tokio::test]
async fn test_fileinfo_duplicate_detection() {
// Setup in-memory database for testing
async fn test_fileinfo_preserves_original_filename_and_sanitizes_path() {
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
db.apply_migrations().await.unwrap();
// Create a test file
let content = b"This is a test file for cross-filesystem duplicate detection";
let file_name = "cross_fs_duplicate.txt";
let content = b"filename sanitization";
let original_name = "Complex name (1).txt";
let expected_sanitized = "Complex_name__1_.txt";
let field_data = create_test_file(content, original_name);
let test_storage = store::testing::TestStorageManager::new_memory()
.await
.expect("Failed to create test storage manager");
let file_info =
FileInfo::new_with_storage(field_data, &db, "sanitized_user", test_storage.storage())
.await
.expect("Failed to create file via storage manager");
assert_eq!(file_info.file_name, original_name);
let stored_name = Path::new(&file_info.path)
.file_name()
.and_then(|name| name.to_str())
.expect("stored name");
assert_eq!(stored_name, expected_sanitized);
}
#[tokio::test]
async fn test_fileinfo_duplicate_detection_with_storage_manager() {
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
db.apply_migrations().await.unwrap();
let content = b"This is a test file for StorageManager duplicate detection";
let file_name = "storage_manager_duplicate.txt";
let field_data = create_test_file(content, file_name);
// Create a FileInfo instance with data_dir in /tmp
// Create test storage manager
let test_storage = store::testing::TestStorageManager::new_memory()
.await
.expect("Failed to create test storage manager");
// Create a FileInfo instance with storage manager
let user_id = "test_user";
let config = AppConfig {
data_dir: "/tmp/minne_test_data".to_string(),
openai_api_key: "test_key".to_string(),
surrealdb_address: "test_address".to_string(),
surrealdb_username: "test_user".to_string(),
surrealdb_password: "test_pass".to_string(),
surrealdb_namespace: "test_ns".to_string(),
surrealdb_database: "test_db".to_string(),
http_port: 3000,
openai_base_url: "..".to_string(),
storage: StorageKind::Local,
pdf_ingest_mode: LlmFirst,
};
// Store the original file
let original_file_info = FileInfo::new(field_data, &db, user_id, &config)
.await
.expect("Failed to create original file");
let original_file_info =
FileInfo::new_with_storage(field_data, &db, user_id, test_storage.storage())
.await
.expect("Failed to create original file with StorageManager");
// Create another file with the same content but different name
let duplicate_name = "cross_fs_duplicate_2.txt";
let duplicate_name = "storage_manager_duplicate_2.txt";
let field_data2 = create_test_file(content, duplicate_name);
// The system should detect it's the same file and return the original FileInfo
let duplicate_file_info = FileInfo::new(field_data2, &db, user_id, &config)
.await
.expect("Failed to process duplicate file");
let duplicate_file_info =
FileInfo::new_with_storage(field_data2, &db, user_id, test_storage.storage())
.await
.expect("Failed to process duplicate file with StorageManager");
// Verify duplicate detection worked
assert_eq!(duplicate_file_info.id, original_file_info.id);
@@ -409,46 +467,48 @@ mod tests {
assert_eq!(duplicate_file_info.file_name, file_name);
assert_ne!(duplicate_file_info.file_name, duplicate_name);
// Clean up
FileInfo::delete_by_id(&original_file_info.id, &db, &config)
// Verify both files have the same content (they should point to the same file)
let original_content = original_file_info
.get_content_with_storage(test_storage.storage())
.await
.expect("Failed to delete file");
let _ = tokio::fs::remove_dir_all(&config.data_dir).await;
.unwrap();
let duplicate_content = duplicate_file_info
.get_content_with_storage(test_storage.storage())
.await
.unwrap();
assert_eq!(original_content.as_ref(), content);
assert_eq!(duplicate_content.as_ref(), content);
// Clean up
FileInfo::delete_by_id_with_storage(&original_file_info.id, &db, test_storage.storage())
.await
.expect("Failed to delete original file with StorageManager");
}
#[tokio::test]
async fn test_file_creation() {
// Setup in-memory database for testing
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
db.apply_migrations()
.await
.expect("Failed to apply migrations");
// Create a test file
let content = b"This is a test file content";
let file_name = "test_file.txt";
let field_data = create_test_file(content, file_name);
// Create a FileInfo instance
// Create a FileInfo instance with StorageManager
let user_id = "test_user";
let config = AppConfig {
data_dir: "./data".to_string(),
openai_api_key: "test_key".to_string(),
surrealdb_address: "test_address".to_string(),
surrealdb_username: "test_user".to_string(),
surrealdb_password: "test_pass".to_string(),
surrealdb_namespace: "test_ns".to_string(),
surrealdb_database: "test_db".to_string(),
http_port: 3000,
openai_base_url: "..".to_string(),
storage: StorageKind::Local,
pdf_ingest_mode: LlmFirst,
};
let file_info = FileInfo::new(field_data, &db, user_id, &config).await;
let test_storage = TestStorageManager::new_memory()
.await
.expect("create test storage manager");
let file_info =
FileInfo::new_with_storage(field_data, &db, user_id, test_storage.storage()).await;
// We can't fully test persistence to disk in unit tests,
// but we can verify the database record was created
// Verify the FileInfo was created successfully
assert!(file_info.is_ok());
let file_info = file_info.unwrap();
@@ -478,45 +538,39 @@ mod tests {
#[tokio::test]
async fn test_file_duplicate_detection() {
// Setup in-memory database for testing
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
db.apply_migrations()
.await
.expect("Failed to apply migrations");
// First, store a file with known content
let content = b"This is a test file for duplicate detection";
let file_name = "original.txt";
let user_id = "test_user";
let config = AppConfig {
data_dir: "./data".to_string(),
openai_api_key: "test_key".to_string(),
surrealdb_address: "test_address".to_string(),
surrealdb_username: "test_user".to_string(),
surrealdb_password: "test_pass".to_string(),
surrealdb_namespace: "test_ns".to_string(),
surrealdb_database: "test_db".to_string(),
http_port: 3000,
openai_base_url: "..".to_string(),
storage: StorageKind::Local,
pdf_ingest_mode: LlmFirst,
};
let test_storage = TestStorageManager::new_memory()
.await
.expect("create test storage manager");
let field_data1 = create_test_file(content, file_name);
let original_file_info = FileInfo::new(field_data1, &db, user_id, &config)
.await
.expect("Failed to create original file");
let original_file_info =
FileInfo::new_with_storage(field_data1, &db, user_id, test_storage.storage())
.await
.expect("Failed to create original file");
// Now try to store another file with the same content but different name
let duplicate_name = "duplicate.txt";
let field_data2 = create_test_file(content, duplicate_name);
// The system should detect it's the same file and return the original FileInfo
let duplicate_file_info = FileInfo::new(field_data2, &db, user_id, &config)
.await
.expect("Failed to process duplicate file");
let duplicate_file_info =
FileInfo::new_with_storage(field_data2, &db, user_id, test_storage.storage())
.await
.expect("Failed to process duplicate file");
// The returned FileInfo should match the original
assert_eq!(duplicate_file_info.id, original_file_info.id);
@@ -584,7 +638,6 @@ mod tests {
#[tokio::test]
async fn test_get_by_sha_not_found() {
// Setup in-memory database for testing
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
@@ -605,7 +658,6 @@ mod tests {
#[tokio::test]
async fn test_manual_file_info_creation() {
// Setup in-memory database for testing
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
@@ -646,35 +698,28 @@ mod tests {
#[tokio::test]
async fn test_delete_by_id() {
// Setup in-memory database for testing
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
db.apply_migrations()
.await
.expect("Failed to apply migrations");
// Create and persist a test file via FileInfo::new
// Create and persist a test file via FileInfo::new_with_storage
let user_id = "user123";
let cfg = AppConfig {
data_dir: "./data".to_string(),
openai_api_key: "".to_string(),
surrealdb_address: "".to_string(),
surrealdb_username: "".to_string(),
surrealdb_password: "".to_string(),
surrealdb_namespace: "".to_string(),
surrealdb_database: "".to_string(),
http_port: 0,
openai_base_url: "".to_string(),
storage: crate::utils::config::StorageKind::Local,
pdf_ingest_mode: LlmFirst,
};
let test_storage = TestStorageManager::new_memory()
.await
.expect("create test storage manager");
let temp = create_test_file(b"test content", "test_file.txt");
let file_info = FileInfo::new(temp, &db, user_id, &cfg)
let file_info = FileInfo::new_with_storage(temp, &db, user_id, test_storage.storage())
.await
.expect("create file");
// Delete the file
let delete_result = FileInfo::delete_by_id(&file_info.id, &db, &cfg).await;
// Delete the file using StorageManager
let delete_result =
FileInfo::delete_by_id_with_storage(&file_info.id, &db, test_storage.storage()).await;
// Delete should be successful
assert!(
@@ -693,13 +738,12 @@ mod tests {
"FileInfo should be deleted from the database"
);
// Verify content no longer retrievable
assert!(store::get_bytes_at(&file_info.path, &cfg).await.is_err());
// Verify content no longer retrievable from storage
assert!(test_storage.storage().get(&file_info.path).await.is_err());
}
#[tokio::test]
async fn test_delete_by_id_not_found() {
// Setup in-memory database for testing
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
@@ -707,31 +751,16 @@ mod tests {
.expect("Failed to start in-memory surrealdb");
// Try to delete a file that doesn't exist
let result = FileInfo::delete_by_id(
"nonexistent_id",
&db,
&AppConfig {
data_dir: "./data".to_string(),
openai_api_key: "".to_string(),
surrealdb_address: "".to_string(),
surrealdb_username: "".to_string(),
surrealdb_password: "".to_string(),
surrealdb_namespace: "".to_string(),
surrealdb_database: "".to_string(),
http_port: 0,
openai_base_url: "".to_string(),
storage: crate::utils::config::StorageKind::Local,
pdf_ingest_mode: LlmFirst,
},
)
.await;
let test_storage = TestStorageManager::new_memory().await.unwrap();
let result =
FileInfo::delete_by_id_with_storage("nonexistent_id", &db, test_storage.storage())
.await;
// Should succeed even if the file record does not exist
assert!(result.is_ok());
}
#[tokio::test]
async fn test_get_by_id() {
// Setup in-memory database for testing
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
@@ -772,7 +801,6 @@ mod tests {
#[tokio::test]
async fn test_get_by_id_not_found() {
// Setup in-memory database for testing
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
@@ -796,55 +824,197 @@ mod tests {
}
}
// StorageManager-based tests
#[tokio::test]
async fn test_fileinfo_persist_with_custom_root() {
// Setup in-memory database for testing
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
async fn test_file_info_new_with_storage_memory() {
// Setup
let db = SurrealDbClient::memory("test_ns", "test_file_storage_memory")
.await
.expect("Failed to start in-memory surrealdb");
.unwrap();
db.apply_migrations().await.unwrap();
// Create a test file
let content = b"This is a test file for data directory configuration";
let file_name = "data_dir_test.txt";
let field_data = create_test_file(content, file_name);
// Create a FileInfo instance with a custom data directory
let content = b"This is a test file for StorageManager";
let field_data = create_test_file(content, "test_storage.txt");
let user_id = "test_user";
let custom_data_dir = "/tmp/minne_custom_data_dir";
let config = AppConfig {
data_dir: custom_data_dir.to_string(),
openai_api_key: "test_key".to_string(),
surrealdb_address: "test_address".to_string(),
surrealdb_username: "test_user".to_string(),
surrealdb_password: "test_pass".to_string(),
surrealdb_namespace: "test_ns".to_string(),
surrealdb_database: "test_db".to_string(),
http_port: 3000,
openai_base_url: "..".to_string(),
storage: StorageKind::Local,
pdf_ingest_mode: LlmFirst,
};
// Test file creation
let file_info = FileInfo::new(field_data, &db, user_id, &config)
// Create test storage manager
let storage = store::testing::TestStorageManager::new_memory()
.await
.expect("Failed to create file in custom data directory");
.unwrap();
// Verify the file has the correct content via object_store
let file_content = store::get_bytes_at(&file_info.path, &config)
// Test file creation with StorageManager
let file_info = FileInfo::new_with_storage(field_data, &db, user_id, storage.storage())
.await
.expect("Failed to read file content");
assert_eq!(file_content.as_ref(), content);
.expect("Failed to create file with StorageManager");
// Test file deletion
FileInfo::delete_by_id(&file_info.id, &db, &config)
// Verify the file was created correctly
assert_eq!(file_info.user_id, user_id);
assert_eq!(file_info.file_name, "test_storage.txt");
assert!(!file_info.sha256.is_empty());
assert!(!file_info.path.is_empty());
// Test content retrieval with StorageManager
let retrieved_content = file_info
.get_content_with_storage(storage.storage())
.await
.expect("Failed to delete file");
assert!(store::get_bytes_at(&file_info.path, &config).await.is_err());
.expect("Failed to get file content with StorageManager");
assert_eq!(retrieved_content.as_ref(), content);
// Clean up the test directory
let _ = tokio::fs::remove_dir_all(custom_data_dir).await;
// Test file deletion with StorageManager
FileInfo::delete_by_id_with_storage(&file_info.id, &db, storage.storage())
.await
.expect("Failed to delete file with StorageManager");
// Verify file is deleted
let deleted_content_result = file_info.get_content_with_storage(storage.storage()).await;
assert!(deleted_content_result.is_err());
}
#[tokio::test]
async fn test_file_info_new_with_storage_local() {
// Setup
let db = SurrealDbClient::memory("test_ns", "test_file_storage_local")
.await
.unwrap();
db.apply_migrations().await.unwrap();
let content = b"This is a test file for StorageManager with local storage";
let field_data = create_test_file(content, "test_local.txt");
let user_id = "test_user";
// Create test storage manager with local backend
let storage = store::testing::TestStorageManager::new_local()
.await
.unwrap();
// Test file creation with StorageManager
let file_info = FileInfo::new_with_storage(field_data, &db, user_id, storage.storage())
.await
.expect("Failed to create file with StorageManager");
// Verify the file was created correctly
assert_eq!(file_info.user_id, user_id);
assert_eq!(file_info.file_name, "test_local.txt");
assert!(!file_info.sha256.is_empty());
assert!(!file_info.path.is_empty());
// Test content retrieval with StorageManager
let retrieved_content = file_info
.get_content_with_storage(storage.storage())
.await
.expect("Failed to get file content with StorageManager");
assert_eq!(retrieved_content.as_ref(), content);
// Test file deletion with StorageManager
FileInfo::delete_by_id_with_storage(&file_info.id, &db, storage.storage())
.await
.expect("Failed to delete file with StorageManager");
// Verify file is deleted
let deleted_content_result = file_info.get_content_with_storage(storage.storage()).await;
assert!(deleted_content_result.is_err());
}
#[tokio::test]
async fn test_file_info_storage_manager_persistence() {
// Setup
let db = SurrealDbClient::memory("test_ns", "test_file_persistence")
.await
.unwrap();
db.apply_migrations().await.unwrap();
let content = b"Test content for persistence";
let field_data = create_test_file(content, "persistence_test.txt");
let user_id = "test_user";
// Create test storage manager
let storage = store::testing::TestStorageManager::new_memory()
.await
.unwrap();
// Create file
let file_info = FileInfo::new_with_storage(field_data, &db, user_id, storage.storage())
.await
.expect("Failed to create file");
// Test that data persists across multiple operations with the same StorageManager
let retrieved_content_1 = file_info
.get_content_with_storage(storage.storage())
.await
.unwrap();
let retrieved_content_2 = file_info
.get_content_with_storage(storage.storage())
.await
.unwrap();
assert_eq!(retrieved_content_1.as_ref(), content);
assert_eq!(retrieved_content_2.as_ref(), content);
// Test that different StorageManager instances don't share data (memory storage isolation)
let storage2 = store::testing::TestStorageManager::new_memory()
.await
.unwrap();
let isolated_content_result = file_info.get_content_with_storage(storage2.storage()).await;
assert!(
isolated_content_result.is_err(),
"Different StorageManager should not have access to same data"
);
}
#[tokio::test]
async fn test_file_info_storage_manager_equivalence() {
// Setup
let db = SurrealDbClient::memory("test_ns", "test_file_equivalence")
.await
.unwrap();
db.apply_migrations().await.unwrap();
let content = b"Test content for equivalence testing";
let field_data1 = create_test_file(content, "equivalence_test_1.txt");
let field_data2 = create_test_file(content, "equivalence_test_2.txt");
let user_id = "test_user";
// Create single storage manager and reuse it
let storage_manager = store::testing::TestStorageManager::new_memory()
.await
.unwrap();
let storage = storage_manager.storage();
// Create multiple files with the same storage manager
let file_info_1 = FileInfo::new_with_storage(field_data1, &db, user_id, &storage)
.await
.expect("Failed to create file 1");
let file_info_2 = FileInfo::new_with_storage(field_data2, &db, user_id, &storage)
.await
.expect("Failed to create file 2");
// Test that both files can be retrieved with the same storage backend
let content_1 = file_info_1
.get_content_with_storage(&storage)
.await
.unwrap();
let content_2 = file_info_2
.get_content_with_storage(&storage)
.await
.unwrap();
assert_eq!(content_1.as_ref(), content);
assert_eq!(content_2.as_ref(), content);
// Test that files can be deleted with the same storage manager
FileInfo::delete_by_id_with_storage(&file_info_1.id, &db, &storage)
.await
.unwrap();
FileInfo::delete_by_id_with_storage(&file_info_2.id, &db, &storage)
.await
.unwrap();
// Verify files are deleted
let deleted_content_1 = file_info_1.get_content_with_storage(&storage).await;
let deleted_content_2 = file_info_2.get_content_with_storage(&storage).await;
assert!(deleted_content_1.is_err());
assert!(deleted_content_2.is_err());
}
}

View File

@@ -1,116 +1,529 @@
use futures::Stream;
use surrealdb::{opt::PatchOp, Notification};
use std::time::Duration;
use chrono::Duration as ChronoDuration;
use state_machines::state_machine;
use surrealdb::sql::Datetime as SurrealDatetime;
use uuid::Uuid;
use crate::{error::AppError, storage::db::SurrealDbClient, stored_object};
use super::ingestion_payload::IngestionPayload;
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(tag = "name")]
pub enum IngestionTaskStatus {
Created,
InProgress {
attempts: u32,
last_attempt: DateTime<Utc>,
},
Completed,
Error {
message: String,
},
pub const MAX_ATTEMPTS: u32 = 3;
pub const DEFAULT_LEASE_SECS: i64 = 300;
pub const DEFAULT_PRIORITY: i32 = 0;
#[derive(Debug, Default, Clone, serde::Serialize, serde::Deserialize, PartialEq, Eq)]
pub enum TaskState {
#[serde(rename = "Pending")]
#[default]
Pending,
#[serde(rename = "Reserved")]
Reserved,
#[serde(rename = "Processing")]
Processing,
#[serde(rename = "Succeeded")]
Succeeded,
#[serde(rename = "Failed")]
Failed,
#[serde(rename = "Cancelled")]
Cancelled,
#[serde(rename = "DeadLetter")]
DeadLetter,
}
impl TaskState {
pub fn as_str(&self) -> &'static str {
match self {
TaskState::Pending => "Pending",
TaskState::Reserved => "Reserved",
TaskState::Processing => "Processing",
TaskState::Succeeded => "Succeeded",
TaskState::Failed => "Failed",
TaskState::Cancelled => "Cancelled",
TaskState::DeadLetter => "DeadLetter",
}
}
pub fn is_terminal(&self) -> bool {
matches!(
self,
TaskState::Succeeded | TaskState::Cancelled | TaskState::DeadLetter
)
}
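/// Human-friendly label for the UI; note that `Failed` surfaces as "Retrying"
/// because failed tasks are retried until they reach `max_attempts`.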
pub fn display_label(&self) -> &'static str {
match self {
TaskState::Pending => "Pending",
TaskState::Reserved => "Reserved",
TaskState::Processing => "Processing",
TaskState::Succeeded => "Completed",
TaskState::Failed => "Retrying",
TaskState::Cancelled => "Cancelled",
TaskState::DeadLetter => "Dead Letter",
}
}
}
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq, Eq, Default)]
pub struct TaskErrorInfo {
pub code: Option<String>,
pub message: String,
}
#[derive(Debug, Clone, Copy)]
enum TaskTransition {
StartProcessing,
Succeed,
Fail,
Cancel,
DeadLetter,
Release,
}
impl TaskTransition {
fn as_str(&self) -> &'static str {
match self {
TaskTransition::StartProcessing => "start_processing",
TaskTransition::Succeed => "succeed",
TaskTransition::Fail => "fail",
TaskTransition::Cancel => "cancel",
TaskTransition::DeadLetter => "deadletter",
TaskTransition::Release => "release",
}
}
}
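// Compile-time model of the allowed task transitions. The helper constructors
// below are used in debug assertions to keep the SurrealDB state updates in
// sync with this machine.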
mod lifecycle {
use super::state_machine;
state_machine! {
name: TaskLifecycleMachine,
initial: Pending,
states: [Pending, Reserved, Processing, Succeeded, Failed, Cancelled, DeadLetter],
events {
reserve {
transition: { from: Pending, to: Reserved }
transition: { from: Failed, to: Reserved }
}
start_processing {
transition: { from: Reserved, to: Processing }
}
succeed {
transition: { from: Processing, to: Succeeded }
}
fail {
transition: { from: Processing, to: Failed }
}
cancel {
transition: { from: Pending, to: Cancelled }
transition: { from: Reserved, to: Cancelled }
transition: { from: Processing, to: Cancelled }
}
deadletter {
transition: { from: Failed, to: DeadLetter }
}
release {
transition: { from: Reserved, to: Pending }
}
}
}
pub(super) fn pending() -> TaskLifecycleMachine<(), Pending> {
TaskLifecycleMachine::new(())
}
pub(super) fn reserved() -> TaskLifecycleMachine<(), Reserved> {
pending()
.reserve()
.expect("reserve transition from Pending should exist")
}
pub(super) fn processing() -> TaskLifecycleMachine<(), Processing> {
reserved()
.start_processing()
.expect("start_processing transition from Reserved should exist")
}
pub(super) fn failed() -> TaskLifecycleMachine<(), Failed> {
processing()
.fail()
.expect("fail transition from Processing should exist")
}
}
fn invalid_transition(state: &TaskState, event: TaskTransition) -> AppError {
AppError::Validation(format!(
"Invalid task transition: {} -> {}",
state.as_str(),
event.as_str()
))
}
stored_object!(IngestionTask, "ingestion_task", {
content: IngestionPayload,
status: IngestionTaskStatus,
user_id: String
state: TaskState,
user_id: String,
attempts: u32,
max_attempts: u32,
#[serde(serialize_with = "serialize_datetime", deserialize_with = "deserialize_datetime")]
scheduled_at: chrono::DateTime<chrono::Utc>,
#[serde(
serialize_with = "serialize_option_datetime",
deserialize_with = "deserialize_option_datetime",
default
)]
locked_at: Option<chrono::DateTime<chrono::Utc>>,
lease_duration_secs: i64,
worker_id: Option<String>,
error_code: Option<String>,
error_message: Option<String>,
#[serde(
serialize_with = "serialize_option_datetime",
deserialize_with = "deserialize_option_datetime",
default
)]
last_error_at: Option<chrono::DateTime<chrono::Utc>>,
priority: i32
});
pub const MAX_ATTEMPTS: u32 = 3;
impl IngestionTask {
pub async fn new(content: IngestionPayload, user_id: String) -> Self {
let now = Utc::now();
pub fn new(content: IngestionPayload, user_id: String) -> Self {
let now = chrono::Utc::now();
Self {
id: Uuid::new_v4().to_string(),
content,
status: IngestionTaskStatus::Created,
state: TaskState::Pending,
user_id,
attempts: 0,
max_attempts: MAX_ATTEMPTS,
scheduled_at: now,
locked_at: None,
lease_duration_secs: DEFAULT_LEASE_SECS,
worker_id: None,
error_code: None,
error_message: None,
last_error_at: None,
priority: DEFAULT_PRIORITY,
created_at: now,
updated_at: now,
user_id,
}
}
/// Creates a new job and stores it in the database
pub fn can_retry(&self) -> bool {
self.attempts < self.max_attempts
}
pub fn lease_duration(&self) -> Duration {
Duration::from_secs(self.lease_duration_secs.max(0) as u64)
}
pub async fn create_and_add_to_db(
content: IngestionPayload,
user_id: String,
db: &SurrealDbClient,
) -> Result<IngestionTask, AppError> {
let task = Self::new(content, user_id).await;
let task = Self::new(content, user_id);
db.store_item(task.clone()).await?;
Ok(task)
}
// Update job status
pub async fn update_status(
id: &str,
status: IngestionTaskStatus,
pub async fn claim_next_ready(
db: &SurrealDbClient,
) -> Result<(), AppError> {
let _job: Option<Self> = db
.update((Self::table_name(), id))
.patch(PatchOp::replace("/status", status))
.patch(PatchOp::replace(
"/updated_at",
surrealdb::Datetime::from(Utc::now()),
worker_id: &str,
now: chrono::DateTime<chrono::Utc>,
lease_duration: Duration,
) -> Result<Option<IngestionTask>, AppError> {
debug_assert!(lifecycle::pending().reserve().is_ok());
debug_assert!(lifecycle::failed().reserve().is_ok());
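// Atomically reserve the next ready task: a due Pending/Failed task, or a
// Reserved/Processing task whose lease has expired. Attempts are only
// incremented (capped at max_attempts) when claiming from Pending or Failed.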
const CLAIM_QUERY: &str = r#"
UPDATE (
SELECT * FROM type::table($table)
WHERE state IN $candidate_states
AND scheduled_at <= $now
AND (
attempts < max_attempts
OR state IN $sticky_states
)
AND (
locked_at = NONE
OR time::unix($now) - time::unix(locked_at) >= lease_duration_secs
)
ORDER BY priority DESC, scheduled_at ASC, created_at ASC
LIMIT 1
)
SET state = $reserved_state,
attempts = if state IN $increment_states THEN
if attempts + 1 > max_attempts THEN max_attempts ELSE attempts + 1 END
ELSE
attempts
END,
locked_at = $now,
worker_id = $worker_id,
lease_duration_secs = $lease_secs,
updated_at = $now
RETURN *;
"#;
let mut result = db
.client
.query(CLAIM_QUERY)
.bind(("table", Self::table_name()))
.bind((
"candidate_states",
vec![
TaskState::Pending.as_str(),
TaskState::Failed.as_str(),
TaskState::Reserved.as_str(),
TaskState::Processing.as_str(),
],
))
.bind((
"sticky_states",
vec![TaskState::Reserved.as_str(), TaskState::Processing.as_str()],
))
.bind((
"increment_states",
vec![TaskState::Pending.as_str(), TaskState::Failed.as_str()],
))
.bind(("reserved_state", TaskState::Reserved.as_str()))
.bind(("now", SurrealDatetime::from(now)))
.bind(("worker_id", worker_id.to_string()))
.bind(("lease_secs", lease_duration.as_secs() as i64))
.await?;
Ok(())
let task: Option<IngestionTask> = result.take(0)?;
Ok(task)
}
/// Listen for new jobs
pub async fn listen_for_tasks(
pub async fn mark_processing(&self, db: &SurrealDbClient) -> Result<IngestionTask, AppError> {
const START_PROCESSING_QUERY: &str = r#"
UPDATE type::thing($table, $id)
SET state = $processing,
updated_at = $now,
locked_at = $now
WHERE state = $reserved AND worker_id = $worker_id
RETURN *;
"#;
let now = chrono::Utc::now();
let mut result = db
.client
.query(START_PROCESSING_QUERY)
.bind(("table", Self::table_name()))
.bind(("id", self.id.clone()))
.bind(("processing", TaskState::Processing.as_str()))
.bind(("reserved", TaskState::Reserved.as_str()))
.bind(("now", SurrealDatetime::from(now)))
.bind(("worker_id", self.worker_id.clone().unwrap_or_default()))
.await?;
let updated: Option<IngestionTask> = result.take(0)?;
updated.ok_or_else(|| invalid_transition(&self.state, TaskTransition::StartProcessing))
}
pub async fn mark_succeeded(&self, db: &SurrealDbClient) -> Result<IngestionTask, AppError> {
const COMPLETE_QUERY: &str = r#"
UPDATE type::thing($table, $id)
SET state = $succeeded,
updated_at = $now,
locked_at = NONE,
worker_id = NONE,
scheduled_at = $now,
error_code = NONE,
error_message = NONE,
last_error_at = NONE
WHERE state = $processing AND worker_id = $worker_id
RETURN *;
"#;
let now = chrono::Utc::now();
let mut result = db
.client
.query(COMPLETE_QUERY)
.bind(("table", Self::table_name()))
.bind(("id", self.id.clone()))
.bind(("succeeded", TaskState::Succeeded.as_str()))
.bind(("processing", TaskState::Processing.as_str()))
.bind(("now", SurrealDatetime::from(now)))
.bind(("worker_id", self.worker_id.clone().unwrap_or_default()))
.await?;
let updated: Option<IngestionTask> = result.take(0)?;
updated.ok_or_else(|| invalid_transition(&self.state, TaskTransition::Succeed))
}
pub async fn mark_failed(
&self,
error: TaskErrorInfo,
retry_delay: Duration,
db: &SurrealDbClient,
) -> Result<impl Stream<Item = Result<Notification<Self>, surrealdb::Error>>, surrealdb::Error>
{
db.listen::<Self>().await
) -> Result<IngestionTask, AppError> {
let now = chrono::Utc::now();
let retry_at = now
+ ChronoDuration::from_std(retry_delay).unwrap_or_else(|_| ChronoDuration::seconds(30));
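// Move the task back to Failed, clear the lease, and schedule the retry;
// claim_next_ready will pick it up again once scheduled_at has passed.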
const FAIL_QUERY: &str = r#"
UPDATE type::thing($table, $id)
SET state = $failed,
updated_at = $now,
locked_at = NONE,
worker_id = NONE,
scheduled_at = $retry_at,
error_code = $error_code,
error_message = $error_message,
last_error_at = $now
WHERE state = $processing AND worker_id = $worker_id
RETURN *;
"#;
let mut result = db
.client
.query(FAIL_QUERY)
.bind(("table", Self::table_name()))
.bind(("id", self.id.clone()))
.bind(("failed", TaskState::Failed.as_str()))
.bind(("processing", TaskState::Processing.as_str()))
.bind(("now", SurrealDatetime::from(now)))
.bind(("retry_at", SurrealDatetime::from(retry_at)))
.bind(("error_code", error.code.clone()))
.bind(("error_message", error.message.clone()))
.bind(("worker_id", self.worker_id.clone().unwrap_or_default()))
.await?;
let updated: Option<IngestionTask> = result.take(0)?;
updated.ok_or_else(|| invalid_transition(&self.state, TaskTransition::Fail))
}
/// Get all unfinished tasks, i.e. newly created and in progress up to two times
pub async fn get_unfinished_tasks(db: &SurrealDbClient) -> Result<Vec<Self>, AppError> {
let jobs: Vec<Self> = db
pub async fn mark_dead_letter(
&self,
error: TaskErrorInfo,
db: &SurrealDbClient,
) -> Result<IngestionTask, AppError> {
const DEAD_LETTER_QUERY: &str = r#"
UPDATE type::thing($table, $id)
SET state = $dead,
updated_at = $now,
locked_at = NONE,
worker_id = NONE,
scheduled_at = $now,
error_code = $error_code,
error_message = $error_message,
last_error_at = $now
WHERE state = $failed
RETURN *;
"#;
let now = chrono::Utc::now();
let mut result = db
.client
.query(DEAD_LETTER_QUERY)
.bind(("table", Self::table_name()))
.bind(("id", self.id.clone()))
.bind(("dead", TaskState::DeadLetter.as_str()))
.bind(("failed", TaskState::Failed.as_str()))
.bind(("now", SurrealDatetime::from(now)))
.bind(("error_code", error.code.clone()))
.bind(("error_message", error.message.clone()))
.await?;
let updated: Option<IngestionTask> = result.take(0)?;
updated.ok_or_else(|| invalid_transition(&self.state, TaskTransition::DeadLetter))
}
pub async fn mark_cancelled(&self, db: &SurrealDbClient) -> Result<IngestionTask, AppError> {
const CANCEL_QUERY: &str = r#"
UPDATE type::thing($table, $id)
SET state = $cancelled,
updated_at = $now,
locked_at = NONE,
worker_id = NONE
WHERE state IN $allow_states
RETURN *;
"#;
let now = chrono::Utc::now();
let mut result = db
.client
.query(CANCEL_QUERY)
.bind(("table", Self::table_name()))
.bind(("id", self.id.clone()))
.bind(("cancelled", TaskState::Cancelled.as_str()))
.bind((
"allow_states",
vec![
TaskState::Pending.as_str(),
TaskState::Reserved.as_str(),
TaskState::Processing.as_str(),
],
))
.bind(("now", SurrealDatetime::from(now)))
.await?;
let updated: Option<IngestionTask> = result.take(0)?;
updated.ok_or_else(|| invalid_transition(&self.state, TaskTransition::Cancel))
}
pub async fn release(&self, db: &SurrealDbClient) -> Result<IngestionTask, AppError> {
const RELEASE_QUERY: &str = r#"
UPDATE type::thing($table, $id)
SET state = $pending,
updated_at = $now,
locked_at = NONE,
worker_id = NONE
WHERE state = $reserved
RETURN *;
"#;
let now = chrono::Utc::now();
let mut result = db
.client
.query(RELEASE_QUERY)
.bind(("table", Self::table_name()))
.bind(("id", self.id.clone()))
.bind(("pending", TaskState::Pending.as_str()))
.bind(("reserved", TaskState::Reserved.as_str()))
.bind(("now", SurrealDatetime::from(now)))
.await?;
let updated: Option<IngestionTask> = result.take(0)?;
updated.ok_or_else(|| invalid_transition(&self.state, TaskTransition::Release))
}
pub async fn get_unfinished_tasks(
db: &SurrealDbClient,
) -> Result<Vec<IngestionTask>, AppError> {
let tasks: Vec<IngestionTask> = db
.query(
"SELECT * FROM type::table($table)
WHERE
status.name = 'Created'
OR (
status.name = 'InProgress'
AND status.attempts < $max_attempts
)
ORDER BY created_at ASC",
"SELECT * FROM type::table($table)
WHERE state IN $active_states
ORDER BY scheduled_at ASC, created_at ASC",
)
.bind(("table", Self::table_name()))
.bind(("max_attempts", MAX_ATTEMPTS))
.bind((
"active_states",
vec![
TaskState::Pending.as_str(),
TaskState::Reserved.as_str(),
TaskState::Processing.as_str(),
TaskState::Failed.as_str(),
],
))
.await?
.take(0)?;
Ok(jobs)
Ok(tasks)
}
}
#[cfg(test)]
mod tests {
use super::*;
use chrono::Utc;
use crate::storage::types::ingestion_payload::IngestionPayload;
// Helper function to create a test ingestion payload
fn create_test_payload(user_id: &str) -> IngestionPayload {
fn create_payload(user_id: &str) -> IngestionPayload {
IngestionPayload::Text {
text: "Test content".to_string(),
context: "Test context".to_string(),
@@ -119,182 +532,197 @@ mod tests {
}
}
async fn memory_db() -> SurrealDbClient {
let namespace = "test_ns";
let database = Uuid::new_v4().to_string();
SurrealDbClient::memory(namespace, &database)
.await
.expect("in-memory surrealdb")
}
#[tokio::test]
async fn test_new_ingestion_task() {
async fn test_new_task_defaults() {
let user_id = "user123";
let payload = create_test_payload(user_id);
let payload = create_payload(user_id);
let task = IngestionTask::new(payload.clone(), user_id.to_string());
let task = IngestionTask::new(payload.clone(), user_id.to_string()).await;
// Verify task properties
assert_eq!(task.user_id, user_id);
assert_eq!(task.content, payload);
assert!(matches!(task.status, IngestionTaskStatus::Created));
assert!(!task.id.is_empty());
assert_eq!(task.state, TaskState::Pending);
assert_eq!(task.attempts, 0);
assert_eq!(task.max_attempts, MAX_ATTEMPTS);
assert!(task.locked_at.is_none());
assert!(task.worker_id.is_none());
}
#[tokio::test]
async fn test_create_and_add_to_db() {
// Setup in-memory database
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
async fn test_create_and_store_task() {
let db = memory_db().await;
let user_id = "user123";
let payload = create_test_payload(user_id);
let payload = create_payload(user_id);
// Create and store task
IngestionTask::create_and_add_to_db(payload.clone(), user_id.to_string(), &db)
let created =
IngestionTask::create_and_add_to_db(payload.clone(), user_id.to_string(), &db)
.await
.expect("store");
let stored: Option<IngestionTask> = db
.get_item::<IngestionTask>(&created.id)
.await
.expect("Failed to create and add task to db");
.expect("fetch");
// Query to verify task was stored
let query = format!(
"SELECT * FROM {} WHERE user_id = '{}'",
IngestionTask::table_name(),
user_id
);
let mut result = db.query(query).await.expect("Query failed");
let tasks: Vec<IngestionTask> = result.take(0).unwrap_or_default();
// Verify task is in the database
assert!(!tasks.is_empty(), "Task should exist in the database");
let stored_task = &tasks[0];
assert_eq!(stored_task.user_id, user_id);
assert!(matches!(stored_task.status, IngestionTaskStatus::Created));
let stored = stored.expect("task exists");
assert_eq!(stored.id, created.id);
assert_eq!(stored.state, TaskState::Pending);
assert_eq!(stored.attempts, 0);
}
#[tokio::test]
async fn test_update_status() {
// Setup in-memory database
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
async fn test_claim_and_transition() {
let db = memory_db().await;
let user_id = "user123";
let payload = create_test_payload(user_id);
let payload = create_payload(user_id);
let task = IngestionTask::new(payload, user_id.to_string());
db.store_item(task.clone()).await.expect("store");
// Create task manually
let task = IngestionTask::new(payload.clone(), user_id.to_string()).await;
let task_id = task.id.clone();
let worker_id = "worker-1";
let now = chrono::Utc::now();
let claimed = IngestionTask::claim_next_ready(&db, worker_id, now, Duration::from_secs(60))
.await
.expect("claim");
// Store task
db.store_item(task).await.expect("Failed to store task");
let claimed = claimed.expect("task claimed");
assert_eq!(claimed.state, TaskState::Reserved);
assert_eq!(claimed.worker_id.as_deref(), Some(worker_id));
// Update status to InProgress
let now = Utc::now();
let new_status = IngestionTaskStatus::InProgress {
attempts: 1,
last_attempt: now,
let processing = claimed.mark_processing(&db).await.expect("processing");
assert_eq!(processing.state, TaskState::Processing);
let succeeded = processing.mark_succeeded(&db).await.expect("succeeded");
assert_eq!(succeeded.state, TaskState::Succeeded);
assert!(succeeded.worker_id.is_none());
assert!(succeeded.locked_at.is_none());
}
#[tokio::test]
async fn test_fail_and_dead_letter() {
let db = memory_db().await;
let user_id = "user123";
let payload = create_payload(user_id);
let task = IngestionTask::new(payload, user_id.to_string());
db.store_item(task.clone()).await.expect("store");
let worker_id = "worker-dead";
let now = chrono::Utc::now();
let claimed = IngestionTask::claim_next_ready(&db, worker_id, now, Duration::from_secs(60))
.await
.expect("claim")
.expect("claimed");
let processing = claimed.mark_processing(&db).await.expect("processing");
let error_info = TaskErrorInfo {
code: Some("pipeline_error".into()),
message: "failed".into(),
};
IngestionTask::update_status(&task_id, new_status.clone(), &db)
let failed = processing
.mark_failed(error_info.clone(), Duration::from_secs(30), &db)
.await
.expect("Failed to update status");
.expect("failed update");
assert_eq!(failed.state, TaskState::Failed);
assert_eq!(failed.error_message.as_deref(), Some("failed"));
assert!(failed.worker_id.is_none());
assert!(failed.locked_at.is_none());
assert!(failed.scheduled_at > now);
// Verify status updated
let updated_task: Option<IngestionTask> = db
.get_item::<IngestionTask>(&task_id)
let dead = failed
.mark_dead_letter(error_info.clone(), &db)
.await
.expect("Failed to get updated task");
.expect("dead letter");
assert_eq!(dead.state, TaskState::DeadLetter);
assert_eq!(dead.error_message.as_deref(), Some("failed"));
}
assert!(updated_task.is_some());
let updated_task = updated_task.unwrap();
#[tokio::test]
async fn test_mark_processing_requires_reservation() {
let db = memory_db().await;
let user_id = "user123";
let payload = create_payload(user_id);
match updated_task.status {
IngestionTaskStatus::InProgress { attempts, .. } => {
assert_eq!(attempts, 1);
let task = IngestionTask::new(payload.clone(), user_id.to_string());
db.store_item(task.clone()).await.expect("store");
let err = task
.mark_processing(&db)
.await
.expect_err("processing should fail without reservation");
match err {
AppError::Validation(message) => {
assert!(
message.contains("Pending -> start_processing"),
"unexpected message: {message}"
);
}
_ => panic!("Expected InProgress status"),
other => panic!("expected validation error, got {other:?}"),
}
}
#[tokio::test]
async fn test_get_unfinished_tasks() {
// Setup in-memory database
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
async fn test_mark_failed_requires_processing() {
let db = memory_db().await;
let user_id = "user123";
let payload = create_test_payload(user_id);
let payload = create_payload(user_id);
// Create tasks with different statuses
let created_task = IngestionTask::new(payload.clone(), user_id.to_string()).await;
let task = IngestionTask::new(payload.clone(), user_id.to_string());
db.store_item(task.clone()).await.expect("store");
let mut in_progress_task = IngestionTask::new(payload.clone(), user_id.to_string()).await;
in_progress_task.status = IngestionTaskStatus::InProgress {
attempts: 1,
last_attempt: Utc::now(),
};
let mut max_attempts_task = IngestionTask::new(payload.clone(), user_id.to_string()).await;
max_attempts_task.status = IngestionTaskStatus::InProgress {
attempts: MAX_ATTEMPTS,
last_attempt: Utc::now(),
};
let mut completed_task = IngestionTask::new(payload.clone(), user_id.to_string()).await;
completed_task.status = IngestionTaskStatus::Completed;
let mut error_task = IngestionTask::new(payload.clone(), user_id.to_string()).await;
error_task.status = IngestionTaskStatus::Error {
message: "Test error".to_string(),
};
// Store all tasks
db.store_item(created_task)
let err = task
.mark_failed(
TaskErrorInfo {
code: None,
message: "boom".into(),
},
Duration::from_secs(30),
&db,
)
.await
.expect("Failed to store created task");
db.store_item(in_progress_task)
.await
.expect("Failed to store in-progress task");
db.store_item(max_attempts_task)
.await
.expect("Failed to store max-attempts task");
db.store_item(completed_task)
.await
.expect("Failed to store completed task");
db.store_item(error_task)
.await
.expect("Failed to store error task");
.expect_err("failing should require processing state");
// Get unfinished tasks
let unfinished_tasks = IngestionTask::get_unfinished_tasks(&db)
match err {
AppError::Validation(message) => {
assert!(
message.contains("Pending -> fail"),
"unexpected message: {message}"
);
}
other => panic!("expected validation error, got {other:?}"),
}
}
#[tokio::test]
async fn test_release_requires_reservation() {
let db = memory_db().await;
let user_id = "user123";
let payload = create_payload(user_id);
let task = IngestionTask::new(payload.clone(), user_id.to_string());
db.store_item(task.clone()).await.expect("store");
let err = task
.release(&db)
.await
.expect("Failed to get unfinished tasks");
.expect_err("release should require reserved state");
// Verify only Created and InProgress with attempts < MAX_ATTEMPTS are returned
assert_eq!(unfinished_tasks.len(), 2);
let statuses: Vec<_> = unfinished_tasks
.iter()
.map(|task| match &task.status {
IngestionTaskStatus::Created => "Created",
IngestionTaskStatus::InProgress { attempts, .. } => {
if *attempts < MAX_ATTEMPTS {
"InProgress<MAX"
} else {
"InProgress>=MAX"
}
}
IngestionTaskStatus::Completed => "Completed",
IngestionTaskStatus::Error { .. } => "Error",
IngestionTaskStatus::Cancelled => "Cancelled",
})
.collect();
assert!(statuses.contains(&"Created"));
assert!(statuses.contains(&"InProgress<MAX"));
assert!(!statuses.contains(&"InProgress>=MAX"));
assert!(!statuses.contains(&"Completed"));
assert!(!statuses.contains(&"Error"));
assert!(!statuses.contains(&"Cancelled"));
match err {
AppError::Validation(message) => {
assert!(
message.contains("Pending -> release"),
"unexpected message: {message}"
);
}
other => panic!("expected validation error, got {other:?}"),
}
}
}

View File

@@ -40,6 +40,38 @@ impl From<String> for KnowledgeEntityType {
}
}
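/// Row shape returned by `KnowledgeEntity::search`: the entity fields plus a
/// combined full-text score and optional highlighted snippets.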
#[derive(Debug, Deserialize, Serialize)]
pub struct KnowledgeEntitySearchResult {
#[serde(deserialize_with = "deserialize_flexible_id")]
pub id: String,
#[serde(
serialize_with = "serialize_datetime",
deserialize_with = "deserialize_datetime",
default
)]
pub created_at: DateTime<Utc>,
#[serde(
serialize_with = "serialize_datetime",
deserialize_with = "deserialize_datetime",
default
)]
pub updated_at: DateTime<Utc>,
pub source_id: String,
pub name: String,
pub description: String,
pub entity_type: KnowledgeEntityType,
#[serde(default)]
pub metadata: Option<serde_json::Value>,
pub user_id: String,
pub score: f32,
#[serde(default)]
pub highlighted_name: Option<String>,
#[serde(default)]
pub highlighted_description: Option<String>,
}
stored_object!(KnowledgeEntity, "knowledge_entity", {
source_id: String,
name: String,
@@ -75,6 +107,50 @@ impl KnowledgeEntity {
}
}
pub async fn search(
db: &SurrealDbClient,
search_terms: &str,
user_id: &str,
limit: usize,
) -> Result<Vec<KnowledgeEntitySearchResult>, AppError> {
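// Full-text search over name (index 0) and description (index 1), scoped to
// the user, returning highlighted snippets and a combined relevance score.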
let sql = r#"
SELECT
id,
created_at,
updated_at,
source_id,
name,
description,
entity_type,
metadata,
user_id,
search::highlight('<b>', '</b>', 0) AS highlighted_name,
search::highlight('<b>', '</b>', 1) AS highlighted_description,
(
IF search::score(0) != NONE THEN search::score(0) ELSE 0 END +
IF search::score(1) != NONE THEN search::score(1) ELSE 0 END
) AS score
FROM knowledge_entity
WHERE
(
name @0@ $terms OR
description @1@ $terms
)
AND user_id = $user_id
ORDER BY score DESC
LIMIT $limit;
"#;
Ok(db
.client
.query(sql)
.bind(("terms", search_terms.to_owned()))
.bind(("user_id", user_id.to_owned()))
.bind(("limit", limit))
.await?
.take(0)?)
}
pub async fn delete_by_source_id(
source_id: &str,
db_client: &SurrealDbClient,

View File

@@ -7,6 +7,7 @@ pub mod ingestion_task;
pub mod knowledge_entity;
pub mod knowledge_relationship;
pub mod message;
pub mod scratchpad;
pub mod system_prompts;
pub mod system_settings;
pub mod text_chunk;
@@ -83,6 +84,32 @@ macro_rules! stored_object {
Ok(DateTime::<Utc>::from(dt))
}
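// Optional datetime fields round-trip through surrealdb::sql::Datetime when
// present and fall back to None when absent.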
#[allow(dead_code)]
fn serialize_option_datetime<S>(
date: &Option<DateTime<Utc>>,
serializer: S,
) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
match date {
Some(dt) => serializer
.serialize_some(&Into::<surrealdb::sql::Datetime>::into(*dt)),
None => serializer.serialize_none(),
}
}
#[allow(dead_code)]
fn deserialize_option_datetime<'de, D>(
deserializer: D,
) -> Result<Option<DateTime<Utc>>, D::Error>
where
D: serde::Deserializer<'de>,
{
let value = Option::<surrealdb::sql::Datetime>::deserialize(deserializer)?;
Ok(value.map(DateTime::<Utc>::from))
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct $name {
@@ -92,7 +119,7 @@ macro_rules! stored_object {
pub created_at: DateTime<Utc>,
#[serde(serialize_with = "serialize_datetime", deserialize_with = "deserialize_datetime", default)]
pub updated_at: DateTime<Utc>,
$(pub $field: $ty),*
$( $(#[$attr])* pub $field: $ty),*
}
impl StoredObject for $name {

View File

@@ -0,0 +1,502 @@
use chrono::Utc as ChronoUtc;
use surrealdb::opt::PatchOp;
use uuid::Uuid;
use crate::{error::AppError, storage::db::SurrealDbClient, stored_object};
stored_object!(Scratchpad, "scratchpad", {
user_id: String,
title: String,
content: String,
#[serde(serialize_with = "serialize_datetime", deserialize_with="deserialize_datetime")]
last_saved_at: DateTime<Utc>,
is_dirty: bool,
#[serde(default)]
is_archived: bool,
#[serde(
serialize_with = "serialize_option_datetime",
deserialize_with = "deserialize_option_datetime",
default
)]
archived_at: Option<DateTime<Utc>>,
#[serde(
serialize_with = "serialize_option_datetime",
deserialize_with = "deserialize_option_datetime",
default
)]
ingested_at: Option<DateTime<Utc>>
});
impl Scratchpad {
pub fn new(user_id: String, title: String) -> Self {
let now = ChronoUtc::now();
Self {
id: Uuid::new_v4().to_string(),
created_at: now,
updated_at: now,
user_id,
title,
content: String::new(),
last_saved_at: now,
is_dirty: false,
is_archived: false,
archived_at: None,
ingested_at: None,
}
}
pub async fn get_by_user(user_id: &str, db: &SurrealDbClient) -> Result<Vec<Self>, AppError> {
let scratchpads: Vec<Scratchpad> = db.client
.query("SELECT * FROM type::table($table_name) WHERE user_id = $user_id AND (is_archived = false OR is_archived IS NONE) ORDER BY updated_at DESC")
.bind(("table_name", Self::table_name()))
.bind(("user_id", user_id.to_string()))
.await?
.take(0)?;
Ok(scratchpads)
}
pub async fn get_archived_by_user(
user_id: &str,
db: &SurrealDbClient,
) -> Result<Vec<Self>, AppError> {
let scratchpads: Vec<Scratchpad> = db.client
.query("SELECT * FROM type::table($table_name) WHERE user_id = $user_id AND is_archived = true ORDER BY archived_at DESC, updated_at DESC")
.bind(("table_name", Self::table_name()))
.bind(("user_id", user_id.to_string()))
.await?
.take(0)?;
Ok(scratchpads)
}
pub async fn get_by_id(
id: &str,
user_id: &str,
db: &SurrealDbClient,
) -> Result<Self, AppError> {
let scratchpad: Option<Scratchpad> = db.get_item(id).await?;
let scratchpad =
scratchpad.ok_or_else(|| AppError::NotFound("Scratchpad not found".to_string()))?;
if scratchpad.user_id != user_id {
return Err(AppError::Auth(
"You don't have access to this scratchpad".to_string(),
));
}
Ok(scratchpad)
}
pub async fn update_content(
id: &str,
user_id: &str,
new_content: &str,
db: &SurrealDbClient,
) -> Result<Self, AppError> {
// First verify ownership
let scratchpad = Self::get_by_id(id, user_id, db).await?;
if scratchpad.is_archived {
return Ok(scratchpad);
}
let now = ChronoUtc::now();
let _updated: Option<Self> = db
.update((Self::table_name(), id))
.patch(PatchOp::replace("/content", new_content.to_string()))
.patch(PatchOp::replace(
"/updated_at",
surrealdb::Datetime::from(now),
))
.patch(PatchOp::replace(
"/last_saved_at",
surrealdb::Datetime::from(now),
))
.patch(PatchOp::replace("/is_dirty", false))
.await?;
// Return the updated scratchpad
Self::get_by_id(id, user_id, db).await
}
pub async fn update_title(
id: &str,
user_id: &str,
new_title: &str,
db: &SurrealDbClient,
) -> Result<(), AppError> {
// First verify ownership
let _scratchpad = Self::get_by_id(id, user_id, db).await?;
let _updated: Option<Self> = db
.update((Self::table_name(), id))
.patch(PatchOp::replace("/title", new_title.to_string()))
.patch(PatchOp::replace(
"/updated_at",
surrealdb::Datetime::from(ChronoUtc::now()),
))
.await?;
Ok(())
}
pub async fn delete(id: &str, user_id: &str, db: &SurrealDbClient) -> Result<(), AppError> {
// First verify ownership
let _scratchpad = Self::get_by_id(id, user_id, db).await?;
let _: Option<Self> = db.client.delete((Self::table_name(), id)).await?;
Ok(())
}
pub async fn archive(
id: &str,
user_id: &str,
db: &SurrealDbClient,
mark_ingested: bool,
) -> Result<Self, AppError> {
// Verify ownership
let scratchpad = Self::get_by_id(id, user_id, db).await?;
if scratchpad.is_archived {
if mark_ingested && scratchpad.ingested_at.is_none() {
// Ensure ingested_at is set if required
let surreal_now = surrealdb::Datetime::from(ChronoUtc::now());
let _updated: Option<Self> = db
.update((Self::table_name(), id))
.patch(PatchOp::replace("/ingested_at", surreal_now))
.await?;
return Self::get_by_id(id, user_id, db).await;
}
return Ok(scratchpad);
}
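// Not yet archived: stamp archived_at/updated_at and either set or clear
// ingested_at depending on whether the content was ingested.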
let now = ChronoUtc::now();
let surreal_now = surrealdb::Datetime::from(now);
let mut update = db
.update((Self::table_name(), id))
.patch(PatchOp::replace("/is_archived", true))
.patch(PatchOp::replace("/archived_at", surreal_now.clone()))
.patch(PatchOp::replace("/updated_at", surreal_now.clone()));
update = if mark_ingested {
update.patch(PatchOp::replace("/ingested_at", surreal_now))
} else {
update.patch(PatchOp::remove("/ingested_at"))
};
let _updated: Option<Self> = update.await?;
Self::get_by_id(id, user_id, db).await
}
pub async fn restore(id: &str, user_id: &str, db: &SurrealDbClient) -> Result<Self, AppError> {
// Verify ownership
let scratchpad = Self::get_by_id(id, user_id, db).await?;
if !scratchpad.is_archived {
return Ok(scratchpad);
}
let now = ChronoUtc::now();
let surreal_now = surrealdb::Datetime::from(now);
let _updated: Option<Self> = db
.update((Self::table_name(), id))
.patch(PatchOp::replace("/is_archived", false))
.patch(PatchOp::remove("/archived_at"))
.patch(PatchOp::remove("/ingested_at"))
.patch(PatchOp::replace("/updated_at", surreal_now))
.await?;
Self::get_by_id(id, user_id, db).await
}
}
#[cfg(test)]
mod tests {
use super::*;
#[tokio::test]
async fn test_create_scratchpad() {
// Setup in-memory database for testing
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
db.apply_migrations()
.await
.expect("Failed to apply migrations");
// Create a new scratchpad
let user_id = "test_user";
let title = "Test Scratchpad";
let scratchpad = Scratchpad::new(user_id.to_string(), title.to_string());
// Verify scratchpad properties
assert_eq!(scratchpad.user_id, user_id);
assert_eq!(scratchpad.title, title);
assert_eq!(scratchpad.content, "");
assert!(!scratchpad.is_dirty);
assert!(!scratchpad.is_archived);
assert!(scratchpad.archived_at.is_none());
assert!(scratchpad.ingested_at.is_none());
assert!(!scratchpad.id.is_empty());
// Store the scratchpad
let result = db.store_item(scratchpad.clone()).await;
assert!(result.is_ok());
// Verify it can be retrieved
let retrieved: Option<Scratchpad> = db
.get_item(&scratchpad.id)
.await
.expect("Failed to retrieve scratchpad");
assert!(retrieved.is_some());
let retrieved = retrieved.unwrap();
assert_eq!(retrieved.id, scratchpad.id);
assert_eq!(retrieved.user_id, user_id);
assert_eq!(retrieved.title, title);
assert!(!retrieved.is_archived);
assert!(retrieved.archived_at.is_none());
assert!(retrieved.ingested_at.is_none());
}
#[tokio::test]
async fn test_get_by_user() {
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
db.apply_migrations()
.await
.expect("Failed to apply migrations");
let user_id = "test_user";
// Create multiple scratchpads
let scratchpad1 = Scratchpad::new(user_id.to_string(), "First".to_string());
let scratchpad2 = Scratchpad::new(user_id.to_string(), "Second".to_string());
let scratchpad3 = Scratchpad::new("other_user".to_string(), "Other".to_string());
// Store them
let scratchpad1_id = scratchpad1.id.clone();
let scratchpad2_id = scratchpad2.id.clone();
db.store_item(scratchpad1).await.unwrap();
db.store_item(scratchpad2).await.unwrap();
db.store_item(scratchpad3).await.unwrap();
// Archive one of the user's scratchpads
Scratchpad::archive(&scratchpad2_id, user_id, &db, false)
.await
.unwrap();
// Get scratchpads for user_id
let user_scratchpads = Scratchpad::get_by_user(user_id, &db).await.unwrap();
assert_eq!(user_scratchpads.len(), 1);
assert_eq!(user_scratchpads[0].id, scratchpad1_id);
// Verify they belong to the user
for scratchpad in &user_scratchpads {
assert_eq!(scratchpad.user_id, user_id);
}
let archived = Scratchpad::get_archived_by_user(user_id, &db)
.await
.unwrap();
assert_eq!(archived.len(), 1);
assert_eq!(archived[0].id, scratchpad2_id);
assert!(archived[0].is_archived);
assert!(archived[0].ingested_at.is_none());
}
#[tokio::test]
async fn test_archive_and_restore() {
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
db.apply_migrations()
.await
.expect("Failed to apply migrations");
let user_id = "test_user";
let scratchpad = Scratchpad::new(user_id.to_string(), "Test".to_string());
let scratchpad_id = scratchpad.id.clone();
db.store_item(scratchpad).await.unwrap();
let archived = Scratchpad::archive(&scratchpad_id, user_id, &db, true)
.await
.expect("Failed to archive");
assert!(archived.is_archived);
assert!(archived.archived_at.is_some());
assert!(archived.ingested_at.is_some());
let restored = Scratchpad::restore(&scratchpad_id, user_id, &db)
.await
.expect("Failed to restore");
assert!(!restored.is_archived);
assert!(restored.archived_at.is_none());
assert!(restored.ingested_at.is_none());
}
#[tokio::test]
async fn test_update_content() {
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
db.apply_migrations()
.await
.expect("Failed to apply migrations");
let user_id = "test_user";
let scratchpad = Scratchpad::new(user_id.to_string(), "Test".to_string());
let scratchpad_id = scratchpad.id.clone();
db.store_item(scratchpad).await.unwrap();
let new_content = "Updated content";
let updated = Scratchpad::update_content(&scratchpad_id, user_id, new_content, &db)
.await
.unwrap();
assert_eq!(updated.content, new_content);
assert!(!updated.is_dirty);
}
#[tokio::test]
async fn test_update_content_unauthorized() {
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
db.apply_migrations()
.await
.expect("Failed to apply migrations");
let owner_id = "owner";
let other_user = "other_user";
let scratchpad = Scratchpad::new(owner_id.to_string(), "Test".to_string());
let scratchpad_id = scratchpad.id.clone();
db.store_item(scratchpad).await.unwrap();
let result = Scratchpad::update_content(&scratchpad_id, other_user, "Hacked", &db).await;
assert!(result.is_err());
match result {
Err(AppError::Auth(_)) => {}
_ => panic!("Expected Auth error"),
}
}
#[tokio::test]
async fn test_delete_scratchpad() {
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
db.apply_migrations()
.await
.expect("Failed to apply migrations");
let user_id = "test_user";
let scratchpad = Scratchpad::new(user_id.to_string(), "Test".to_string());
let scratchpad_id = scratchpad.id.clone();
db.store_item(scratchpad).await.unwrap();
// Delete should succeed
let result = Scratchpad::delete(&scratchpad_id, user_id, &db).await;
assert!(result.is_ok());
// Verify it's gone
let retrieved: Option<Scratchpad> = db.get_item(&scratchpad_id).await.unwrap();
assert!(retrieved.is_none());
}
#[tokio::test]
async fn test_delete_unauthorized() {
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
db.apply_migrations()
.await
.expect("Failed to apply migrations");
let owner_id = "owner";
let other_user = "other_user";
let scratchpad = Scratchpad::new(owner_id.to_string(), "Test".to_string());
let scratchpad_id = scratchpad.id.clone();
db.store_item(scratchpad).await.unwrap();
let result = Scratchpad::delete(&scratchpad_id, other_user, &db).await;
assert!(result.is_err());
match result {
Err(AppError::Auth(_)) => {}
_ => panic!("Expected Auth error"),
}
// Verify it still exists
let retrieved: Option<Scratchpad> = db.get_item(&scratchpad_id).await.unwrap();
assert!(retrieved.is_some());
}
#[tokio::test]
async fn test_timezone_aware_scratchpad_conversion() {
let db = SurrealDbClient::memory("test_ns", &Uuid::new_v4().to_string())
.await
.expect("Failed to create test database");
db.apply_migrations()
.await
.expect("Failed to apply migrations");
let user_id = "test_user_123";
let scratchpad =
Scratchpad::new(user_id.to_string(), "Test Timezone Scratchpad".to_string());
let scratchpad_id = scratchpad.id.clone();
db.store_item(scratchpad).await.unwrap();
let retrieved = Scratchpad::get_by_id(&scratchpad_id, user_id, &db)
.await
.unwrap();
// Test that datetime fields are preserved and can be used for timezone formatting
assert!(retrieved.created_at.timestamp() > 0);
assert!(retrieved.updated_at.timestamp() > 0);
assert!(retrieved.last_saved_at.timestamp() > 0);
// Test that optional datetime fields work correctly
assert!(retrieved.archived_at.is_none());
assert!(retrieved.ingested_at.is_none());
// Archive the scratchpad to test optional datetime handling
let archived = Scratchpad::archive(&scratchpad_id, user_id, &db, false)
.await
.unwrap();
assert!(archived.archived_at.is_some());
assert!(archived.archived_at.unwrap().timestamp() > 0);
assert!(archived.ingested_at.is_none());
}
}

View File

@@ -8,7 +8,7 @@ use uuid::Uuid;
use super::text_chunk::TextChunk;
use super::{
conversation::Conversation,
ingestion_task::{IngestionTask, MAX_ATTEMPTS},
ingestion_task::{IngestionTask, TaskState},
knowledge_entity::{KnowledgeEntity, KnowledgeEntityType},
knowledge_relationship::KnowledgeRelationship,
system_settings::SystemSettings,
@@ -168,7 +168,7 @@ impl User {
let now = Utc::now();
let id = Uuid::new_v4().to_string();
let user: Option<User> = db
let user: Option<Self> = db
.client
.query(
"LET $count = (SELECT count() FROM type::table($table))[0].count;
@@ -217,7 +217,7 @@ impl User {
password: &str,
db: &SurrealDbClient,
) -> Result<Self, AppError> {
let user: Option<User> = db
let user: Option<Self> = db
.client
.query(
"SELECT * FROM user
@@ -235,7 +235,7 @@ impl User {
email: &str,
db: &SurrealDbClient,
) -> Result<Option<Self>, AppError> {
let user: Option<User> = db
let user: Option<Self> = db
.client
.query("SELECT * FROM user WHERE email = $email LIMIT 1")
.bind(("email", email.to_string()))
@@ -249,7 +249,7 @@ impl User {
api_key: &str,
db: &SurrealDbClient,
) -> Result<Option<Self>, AppError> {
let user: Option<User> = db
let user: Option<Self> = db
.client
.query("SELECT * FROM user WHERE api_key = $api_key LIMIT 1")
.bind(("api_key", api_key.to_string()))
@@ -264,7 +264,7 @@ impl User {
let api_key = format!("sk_{}", Uuid::new_v4().to_string().replace("-", ""));
// Update the user record with the new API key
let user: Option<User> = db
let user: Option<Self> = db
.client
.query(
"UPDATE type::thing('user', $id)
@@ -285,7 +285,7 @@ impl User {
}
pub async fn revoke_api_key(id: &str, db: &SurrealDbClient) -> Result<(), AppError> {
let user: Option<User> = db
let user: Option<Self> = db
.client
.query(
"UPDATE type::thing('user', $id)
@@ -357,7 +357,7 @@ impl User {
let entity_types: Vec<String> = response
.into_iter()
.map(|item| {
let normalized = KnowledgeEntityType::from(item.entity_type.clone());
let normalized = KnowledgeEntityType::from(item.entity_type);
format!("{:?}", normalized)
})
.collect();
@@ -449,7 +449,7 @@ impl User {
db: &SurrealDbClient,
) -> Result<(), AppError> {
db.query("UPDATE type::thing('user', $user_id) SET timezone = $timezone")
.bind(("table_name", User::table_name()))
.bind(("table_name", Self::table_name()))
.bind(("user_id", user_id.to_string()))
.bind(("timezone", timezone.to_string()))
.await?;
@@ -535,19 +535,43 @@ impl User {
let jobs: Vec<IngestionTask> = db
.query(
"SELECT * FROM type::table($table)
WHERE user_id = $user_id
AND (
status.name = 'Created'
OR (
status.name = 'InProgress'
AND status.attempts < $max_attempts
)
)
ORDER BY created_at DESC",
WHERE user_id = $user_id
AND (
state IN $active_states
OR (state = $failed_state AND attempts < max_attempts)
)
ORDER BY scheduled_at ASC, created_at DESC",
)
.bind(("table", IngestionTask::table_name()))
.bind(("user_id", user_id.to_owned()))
.bind((
"active_states",
vec![
TaskState::Pending.as_str(),
TaskState::Reserved.as_str(),
TaskState::Processing.as_str(),
],
))
.bind(("failed_state", TaskState::Failed.as_str()))
.await?
.take(0)?;
Ok(jobs)
}
/// Gets all ingestion tasks for the specified user, ordered newest first
pub async fn get_all_ingestion_tasks(
user_id: &str,
db: &SurrealDbClient,
) -> Result<Vec<IngestionTask>, AppError> {
let jobs: Vec<IngestionTask> = db
.query(
"SELECT * FROM type::table($table)
WHERE user_id = $user_id
ORDER BY created_at DESC",
)
.bind(("table", IngestionTask::table_name()))
.bind(("user_id", user_id.to_owned()))
.bind(("max_attempts", MAX_ATTEMPTS))
.await?
.take(0)?;
@@ -605,7 +629,7 @@ impl User {
mod tests {
use super::*;
use crate::storage::types::ingestion_payload::IngestionPayload;
use crate::storage::types::ingestion_task::{IngestionTask, IngestionTaskStatus, MAX_ATTEMPTS};
use crate::storage::types::ingestion_task::{IngestionTask, TaskState, MAX_ATTEMPTS};
use std::collections::HashSet;
// Helper function to set up a test database with SystemSettings
@@ -705,33 +729,36 @@ mod tests {
user_id: user_id.to_string(),
};
let created_task = IngestionTask::new(payload.clone(), user_id.to_string()).await;
let created_task = IngestionTask::new(payload.clone(), user_id.to_string());
db.store_item(created_task.clone())
.await
.expect("Failed to store created task");
let mut in_progress_allowed =
IngestionTask::new(payload.clone(), user_id.to_string()).await;
in_progress_allowed.status = IngestionTaskStatus::InProgress {
attempts: 1,
last_attempt: chrono::Utc::now(),
};
db.store_item(in_progress_allowed.clone())
let mut processing_task = IngestionTask::new(payload.clone(), user_id.to_string());
processing_task.state = TaskState::Processing;
processing_task.attempts = 1;
db.store_item(processing_task.clone())
.await
.expect("Failed to store in-progress task");
.expect("Failed to store processing task");
let mut in_progress_blocked =
IngestionTask::new(payload.clone(), user_id.to_string()).await;
in_progress_blocked.status = IngestionTaskStatus::InProgress {
attempts: MAX_ATTEMPTS,
last_attempt: chrono::Utc::now(),
};
db.store_item(in_progress_blocked.clone())
let mut failed_retry_task = IngestionTask::new(payload.clone(), user_id.to_string());
failed_retry_task.state = TaskState::Failed;
failed_retry_task.attempts = 1;
failed_retry_task.scheduled_at = chrono::Utc::now() - chrono::Duration::minutes(5);
db.store_item(failed_retry_task.clone())
.await
.expect("Failed to store retryable failed task");
let mut failed_blocked_task = IngestionTask::new(payload.clone(), user_id.to_string());
failed_blocked_task.state = TaskState::Failed;
failed_blocked_task.attempts = MAX_ATTEMPTS;
failed_blocked_task.error_message = Some("Too many failures".into());
db.store_item(failed_blocked_task.clone())
.await
.expect("Failed to store blocked task");
let mut completed_task = IngestionTask::new(payload.clone(), user_id.to_string()).await;
completed_task.status = IngestionTaskStatus::Completed;
let mut completed_task = IngestionTask::new(payload.clone(), user_id.to_string());
completed_task.state = TaskState::Succeeded;
db.store_item(completed_task.clone())
.await
.expect("Failed to store completed task");
@@ -742,7 +769,7 @@ mod tests {
category: "Category".to_string(),
user_id: other_user_id.to_string(),
};
let other_task = IngestionTask::new(other_payload, other_user_id.to_string()).await;
let other_task = IngestionTask::new(other_payload, other_user_id.to_string());
db.store_item(other_task)
.await
.expect("Failed to store other user task");
@@ -755,10 +782,54 @@ mod tests {
unfinished.iter().map(|task| task.id.clone()).collect();
assert!(unfinished_ids.contains(&created_task.id));
assert!(unfinished_ids.contains(&in_progress_allowed.id));
assert!(!unfinished_ids.contains(&in_progress_blocked.id));
assert!(unfinished_ids.contains(&processing_task.id));
assert!(unfinished_ids.contains(&failed_retry_task.id));
assert!(!unfinished_ids.contains(&failed_blocked_task.id));
assert!(!unfinished_ids.contains(&completed_task.id));
assert_eq!(unfinished_ids.len(), 2);
assert_eq!(unfinished_ids.len(), 3);
}
#[tokio::test]
async fn test_get_all_ingestion_tasks_returns_sorted() {
let db = setup_test_db().await;
let user_id = "archive_user";
let other_user_id = "other_user";
let payload = IngestionPayload::Text {
text: "One".to_string(),
context: "Context".to_string(),
category: "Category".to_string(),
user_id: user_id.to_string(),
};
// Oldest task
let mut first = IngestionTask::new(payload.clone(), user_id.to_string());
first.created_at = first.created_at - chrono::Duration::minutes(1);
first.updated_at = first.created_at;
first.state = TaskState::Succeeded;
db.store_item(first.clone()).await.expect("store first");
// Latest task
let mut second = IngestionTask::new(payload.clone(), user_id.to_string());
second.state = TaskState::Processing;
db.store_item(second.clone()).await.expect("store second");
let other_payload = IngestionPayload::Text {
text: "Other".to_string(),
context: "Context".to_string(),
category: "Category".to_string(),
user_id: other_user_id.to_string(),
};
let other_task = IngestionTask::new(other_payload, other_user_id.to_string());
db.store_item(other_task).await.expect("store other");
let tasks = User::get_all_ingestion_tasks(user_id, &db)
.await
.expect("fetch all tasks");
assert_eq!(tasks.len(), 2);
assert_eq!(tasks[0].id, second.id); // newest first
assert_eq!(tasks[1].id, first.id);
}
#[tokio::test]

View File

@@ -1,10 +1,12 @@
use config::{Config, ConfigError, Environment, File};
use serde::Deserialize;
use std::env;
#[derive(Clone, Deserialize, Debug)]
#[derive(Clone, Deserialize, Debug, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum StorageKind {
Local,
Memory,
}
fn default_storage_kind() -> StorageKind {
@@ -42,6 +44,16 @@ pub struct AppConfig {
pub storage: StorageKind,
#[serde(default = "default_pdf_ingest_mode")]
pub pdf_ingest_mode: PdfIngestMode,
#[serde(default = "default_reranking_enabled")]
pub reranking_enabled: bool,
#[serde(default)]
pub reranking_pool_size: Option<usize>,
#[serde(default)]
pub fastembed_cache_dir: Option<String>,
#[serde(default)]
pub fastembed_show_download_progress: Option<bool>,
#[serde(default)]
pub fastembed_max_length: Option<usize>,
}
fn default_data_dir() -> String {
@@ -52,7 +64,66 @@ fn default_base_url() -> String {
"https://api.openai.com/v1".to_string()
}
fn default_reranking_enabled() -> bool {
false
}
pub fn ensure_ort_path() {
if env::var_os("ORT_DYLIB_PATH").is_some() {
return;
}
if let Ok(mut exe) = env::current_exe() {
exe.pop();
if cfg!(target_os = "windows") {
for p in [
exe.join("onnxruntime.dll"),
exe.join("lib").join("onnxruntime.dll"),
] {
if p.exists() {
env::set_var("ORT_DYLIB_PATH", p);
return;
}
}
}
let name = if cfg!(target_os = "macos") {
"libonnxruntime.dylib"
} else {
"libonnxruntime.so"
};
let p = exe.join("lib").join(name);
if p.exists() {
env::set_var("ORT_DYLIB_PATH", p);
}
}
}
impl Default for AppConfig {
fn default() -> Self {
Self {
openai_api_key: String::new(),
surrealdb_address: String::new(),
surrealdb_username: String::new(),
surrealdb_password: String::new(),
surrealdb_namespace: String::new(),
surrealdb_database: String::new(),
data_dir: default_data_dir(),
http_port: 0,
openai_base_url: default_base_url(),
storage: default_storage_kind(),
pdf_ingest_mode: default_pdf_ingest_mode(),
reranking_enabled: default_reranking_enabled(),
reranking_pool_size: None,
fastembed_cache_dir: None,
fastembed_show_download_progress: None,
fastembed_max_length: None,
}
}
}
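A minimal sketch (not part of the diff) of the new reranking knobs, leaning on the Default impl above for everything else; the cache path is a made-up example value.
let config = AppConfig {
    reranking_enabled: true,
    reranking_pool_size: Some(2),
    fastembed_cache_dir: Some("/var/lib/minne/fastembed".to_string()),
    ..AppConfig::default()
};
assert!(config.reranking_enabled);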
pub fn get_config() -> Result<AppConfig, ConfigError> {
ensure_ort_path();
let config = Config::builder()
.add_source(File::with_name("config").required(false))
.add_source(Environment::default())

View File

@@ -59,13 +59,13 @@ impl TemplateEngine {
match self {
// Only compile this arm for debug builds
#[cfg(debug_assertions)]
TemplateEngine::AutoReload(reloader) => {
Self::AutoReload(reloader) => {
let env = reloader.acquire_env()?;
env.get_template(name)?.render(ctx)
}
// Only compile this arm for release builds
#[cfg(not(debug_assertions))]
TemplateEngine::Embedded(env) => env.get_template(name)?.render(ctx),
Self::Embedded(env) => env.get_template(name)?.render(ctx),
}
}
@@ -78,19 +78,17 @@ impl TemplateEngine {
match self {
// Only compile this arm for debug builds
#[cfg(debug_assertions)]
TemplateEngine::AutoReload(reloader) => {
let env = reloader.acquire_env()?;
let template = env.get_template(template_name)?;
let mut state = template.eval_to_state(context)?;
state.render_block(block_name)
}
Self::AutoReload(reloader) => reloader
.acquire_env()?
.get_template(template_name)?
.eval_to_state(context)?
.render_block(block_name),
// Only compile this arm for release builds
#[cfg(not(debug_assertions))]
TemplateEngine::Embedded(env) => {
let template = env.get_template(template_name)?;
let mut state = template.eval_to_state(context)?;
state.render_block(block_name)
}
Self::Embedded(env) => env
.get_template(template_name)?
.eval_to_state(context)?
.render_block(block_name),
}
}
}

View File

@@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2021"
license = "AGPL-3.0-or-later"
[lints]
workspace = true
[dependencies]
tokio = { workspace = true }
serde = { workspace = true }
@@ -16,5 +19,7 @@ surrealdb = { workspace = true }
futures = { workspace = true }
async-openai = { workspace = true }
uuid = { workspace = true }
fastembed = { workspace = true }
common = { path = "../common", features = ["test-utils"] }
state-machines = { workspace = true }

View File

@@ -8,19 +8,13 @@ use async_openai::{
};
use common::{
error::AppError,
storage::{
db::SurrealDbClient,
types::{
knowledge_entity::KnowledgeEntity,
message::{format_history, Message},
system_settings::SystemSettings,
},
storage::types::{
message::{format_history, Message},
system_settings::SystemSettings,
},
};
use serde::Deserialize;
use serde_json::{json, Value};
use crate::retrieve_entities;
use serde_json::Value;
use super::answer_retrieval_helper::get_query_response_schema;
@@ -37,80 +31,23 @@ pub struct LLMResponseFormat {
pub references: Vec<Reference>,
}
/// Orchestrates query processing and returns an answer with references
///
/// Takes a query and uses the provided clients to generate an answer with supporting references.
///
/// # Arguments
///
/// * `surreal_db_client` - Client for SurrealDB interactions
/// * `openai_client` - Client for OpenAI API calls
/// * `query` - The user's query string
/// * `user_id` - The user's id
///
/// # Returns
///
/// Returns a tuple of the answer and its references, or an API error
#[derive(Debug)]
pub struct Answer {
pub content: String,
pub references: Vec<String>,
}
pub async fn get_answer_with_references(
surreal_db_client: &SurrealDbClient,
openai_client: &async_openai::Client<async_openai::config::OpenAIConfig>,
query: &str,
user_id: &str,
) -> Result<Answer, AppError> {
let entities = retrieve_entities(surreal_db_client, openai_client, query, user_id).await?;
let settings = SystemSettings::get_current(surreal_db_client).await?;
let entities_json = format_entities_json(&entities);
let user_message = create_user_message(&entities_json, query);
let request = create_chat_request(user_message, &settings)?;
let response = openai_client.chat().create(request).await?;
let llm_response = process_llm_response(response).await?;
Ok(Answer {
content: llm_response.answer,
references: llm_response
.references
.into_iter()
.map(|r| r.reference)
.collect(),
})
}
pub fn format_entities_json(entities: &[KnowledgeEntity]) -> Value {
json!(entities
.iter()
.map(|entity| {
json!({
"KnowledgeEntity": {
"id": entity.id,
"name": entity.name,
"description": entity.description
}
})
})
.collect::<Vec<_>>())
}
pub fn create_user_message(entities_json: &Value, query: &str) -> String {
format!(
r#"
r"
Context Information:
==================
{}
{entities_json}
User Question:
==================
{}
"#,
entities_json, query
{query}
"
)
}
@@ -120,7 +57,7 @@ pub fn create_user_message_with_history(
query: &str,
) -> String {
format!(
r#"
r"
Chat history:
==================
{}
@@ -132,7 +69,7 @@ pub fn create_user_message_with_history(
User Question:
==================
{}
"#,
",
format_history(history),
entities_json,
query
@@ -174,7 +111,7 @@ pub async fn process_llm_response(
))
.and_then(|content| {
serde_json::from_str::<LLMResponseFormat>(content).map_err(|e| {
AppError::LLMParsing(format!("Failed to parse LLM response into analysis: {}", e))
AppError::LLMParsing(format!("Failed to parse LLM response into analysis: {e}"))
})
})
}

View File

@@ -0,0 +1,265 @@
use std::collections::HashMap;
use serde::Deserialize;
use tracing::debug;
use common::{
error::AppError,
storage::{db::SurrealDbClient, types::StoredObject},
};
use crate::scoring::Scored;
use common::storage::types::file_info::deserialize_flexible_id;
use surrealdb::sql::Thing;
#[derive(Debug, Deserialize)]
struct FtsScoreRow {
#[serde(deserialize_with = "deserialize_flexible_id")]
id: String,
fts_score: Option<f32>,
}
/// Executes a full-text search query against SurrealDB and returns scored results.
///
/// The function expects FTS indexes to exist for the provided table. Currently supports
/// `knowledge_entity` (name + description) and `text_chunk` (chunk).
pub async fn find_items_by_fts<T>(
take: usize,
query: &str,
db_client: &SurrealDbClient,
table: &str,
user_id: &str,
) -> Result<Vec<Scored<T>>, AppError>
where
T: for<'de> serde::Deserialize<'de> + StoredObject,
{
let (filter_clause, score_clause) = match table {
"knowledge_entity" => (
"(name @0@ $terms OR description @1@ $terms)",
"(IF search::score(0) != NONE THEN search::score(0) ELSE 0 END) + \
(IF search::score(1) != NONE THEN search::score(1) ELSE 0 END)",
),
"text_chunk" => (
"(chunk @0@ $terms)",
"IF search::score(0) != NONE THEN search::score(0) ELSE 0 END",
),
_ => {
return Err(AppError::Validation(format!(
"FTS not configured for table '{table}'"
)))
}
};
let sql = format!(
"SELECT id, {score_clause} AS fts_score \
FROM {table} \
WHERE {filter_clause} \
AND user_id = $user_id \
ORDER BY fts_score DESC \
LIMIT $limit",
table = table,
filter_clause = filter_clause,
score_clause = score_clause
);
debug!(
table = table,
limit = take,
"Executing FTS query with filter clause: {}",
filter_clause
);
let mut response = db_client
.query(sql)
.bind(("terms", query.to_owned()))
.bind(("user_id", user_id.to_owned()))
.bind(("limit", take as i64))
.await?;
let score_rows: Vec<FtsScoreRow> = response.take(0)?;
if score_rows.is_empty() {
return Ok(Vec::new());
}
let ids: Vec<String> = score_rows.iter().map(|row| row.id.clone()).collect();
let thing_ids: Vec<Thing> = ids
.iter()
.map(|id| Thing::from((table, id.as_str())))
.collect();
let mut items_response = db_client
.query("SELECT * FROM type::table($table) WHERE id IN $things AND user_id = $user_id")
.bind(("table", table.to_owned()))
.bind(("things", thing_ids.clone()))
.bind(("user_id", user_id.to_owned()))
.await?;
let items: Vec<T> = items_response.take(0)?;
let mut item_map: HashMap<String, T> = items
.into_iter()
.map(|item| (item.get_id().to_owned(), item))
.collect();
let mut results = Vec::with_capacity(score_rows.len());
for row in score_rows {
if let Some(item) = item_map.remove(&row.id) {
let score = row.fts_score.unwrap_or_default();
results.push(Scored::new(item).with_fts_score(score));
}
}
Ok(results)
}
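A hedged usage sketch, not from the diff: it assumes an async context, a migrated SurrealDbClient named `db`, and the FTS indexes described above.
use common::storage::types::knowledge_entity::KnowledgeEntity;
let hits =
    find_items_by_fts::<KnowledgeEntity>(10, "tokio", &db, "knowledge_entity", "user_123").await?;
for hit in &hits {
    println!("{} -> fts {:?}, fused {}", hit.item.name, hit.scores.fts, hit.fused);
}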
#[cfg(test)]
mod tests {
use super::*;
use common::storage::types::{
knowledge_entity::{KnowledgeEntity, KnowledgeEntityType},
text_chunk::TextChunk,
StoredObject,
};
use uuid::Uuid;
fn dummy_embedding() -> Vec<f32> {
vec![0.0; 1536]
}
#[tokio::test]
async fn fts_preserves_single_field_score_for_name() {
let namespace = "fts_test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("failed to create in-memory surreal");
db.apply_migrations()
.await
.expect("failed to apply migrations");
let user_id = "user_fts";
let entity = KnowledgeEntity::new(
"source_a".into(),
"Rustacean handbook".into(),
"completely unrelated description".into(),
KnowledgeEntityType::Document,
None,
dummy_embedding(),
user_id.into(),
);
db.store_item(entity.clone())
.await
.expect("failed to insert entity");
db.rebuild_indexes()
.await
.expect("failed to rebuild indexes");
let results = find_items_by_fts::<KnowledgeEntity>(
5,
"rustacean",
&db,
KnowledgeEntity::table_name(),
user_id,
)
.await
.expect("fts query failed");
assert!(!results.is_empty(), "expected at least one FTS result");
assert!(
results[0].scores.fts.is_some(),
"expected an FTS score when only the name matched"
);
}
#[tokio::test]
async fn fts_preserves_single_field_score_for_description() {
let namespace = "fts_test_ns_desc";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("failed to create in-memory surreal");
db.apply_migrations()
.await
.expect("failed to apply migrations");
let user_id = "user_fts_desc";
let entity = KnowledgeEntity::new(
"source_b".into(),
"neutral name".into(),
"Detailed notes about async runtimes".into(),
KnowledgeEntityType::Document,
None,
dummy_embedding(),
user_id.into(),
);
db.store_item(entity.clone())
.await
.expect("failed to insert entity");
db.rebuild_indexes()
.await
.expect("failed to rebuild indexes");
let results = find_items_by_fts::<KnowledgeEntity>(
5,
"async",
&db,
KnowledgeEntity::table_name(),
user_id,
)
.await
.expect("fts query failed");
assert!(!results.is_empty(), "expected at least one FTS result");
assert!(
results[0].scores.fts.is_some(),
"expected an FTS score when only the description matched"
);
}
#[tokio::test]
async fn fts_preserves_scores_for_text_chunks() {
let namespace = "fts_test_ns_chunks";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("failed to create in-memory surreal");
db.apply_migrations()
.await
.expect("failed to apply migrations");
let user_id = "user_fts_chunk";
let chunk = TextChunk::new(
"source_chunk".into(),
"GraphQL documentation reference".into(),
dummy_embedding(),
user_id.into(),
);
db.store_item(chunk.clone())
.await
.expect("failed to insert chunk");
db.rebuild_indexes()
.await
.expect("failed to rebuild indexes");
let results =
find_items_by_fts::<TextChunk>(5, "graphql", &db, TextChunk::table_name(), user_id)
.await
.expect("fts query failed");
assert!(!results.is_empty(), "expected at least one FTS result");
assert!(
results[0].scores.fts.is_some(),
"expected an FTS score when chunk field matched"
);
}
}

View File

@@ -1,6 +1,14 @@
use surrealdb::Error;
use std::collections::{HashMap, HashSet};
use common::storage::{db::SurrealDbClient, types::knowledge_entity::KnowledgeEntity};
use surrealdb::{sql::Thing, Error};
use common::storage::{
db::SurrealDbClient,
types::{
knowledge_entity::KnowledgeEntity, knowledge_relationship::KnowledgeRelationship,
StoredObject,
},
};
/// Retrieves database entries that match a specific source identifier.
///
@@ -12,7 +20,7 @@ use common::storage::{db::SurrealDbClient, types::knowledge_entity::KnowledgeEnt
///
/// * `source_id` - The identifier to search for in the database
/// * `table_name` - The name of the table to search in
/// * `db_client` - The SurrealDB client instance for database operations
/// * `db_client` - The `SurrealDB` client instance for database operations
///
/// # Type Parameters
///
@@ -30,18 +38,21 @@ use common::storage::{db::SurrealDbClient, types::knowledge_entity::KnowledgeEnt
/// * The database query fails to execute
/// * The results cannot be deserialized into type `T`
pub async fn find_entities_by_source_ids<T>(
source_id: Vec<String>,
table_name: String,
source_ids: Vec<String>,
table_name: &str,
user_id: &str,
db: &SurrealDbClient,
) -> Result<Vec<T>, Error>
where
T: for<'de> serde::Deserialize<'de>,
{
let query = "SELECT * FROM type::table($table) WHERE source_id IN $source_ids";
let query =
"SELECT * FROM type::table($table) WHERE source_id IN $source_ids AND user_id = $user_id";
db.query(query)
.bind(("table", table_name))
.bind(("source_ids", source_id))
.bind(("table", table_name.to_owned()))
.bind(("source_ids", source_ids))
.bind(("user_id", user_id.to_owned()))
.await?
.take(0)
}
@@ -49,14 +60,92 @@ where
/// Find entities by their relationship to the id
pub async fn find_entities_by_relationship_by_id(
db: &SurrealDbClient,
entity_id: String,
entity_id: &str,
user_id: &str,
limit: usize,
) -> Result<Vec<KnowledgeEntity>, Error> {
let query = format!(
"SELECT *, <-> relates_to <-> knowledge_entity AS related FROM knowledge_entity:`{}`",
entity_id
);
let mut relationships_response = db
.query(
"
SELECT * FROM relates_to
WHERE metadata.user_id = $user_id
AND (in = type::thing('knowledge_entity', $entity_id)
OR out = type::thing('knowledge_entity', $entity_id))
",
)
.bind(("entity_id", entity_id.to_owned()))
.bind(("user_id", user_id.to_owned()))
.await?;
db.query(query).await?.take(0)
let relationships: Vec<KnowledgeRelationship> = relationships_response.take(0)?;
if relationships.is_empty() {
return Ok(Vec::new());
}
let mut neighbor_ids: Vec<String> = Vec::new();
let mut seen: HashSet<String> = HashSet::new();
for rel in relationships {
if rel.in_ == entity_id {
if seen.insert(rel.out.clone()) {
neighbor_ids.push(rel.out);
}
} else if rel.out == entity_id {
if seen.insert(rel.in_.clone()) {
neighbor_ids.push(rel.in_);
}
} else {
if seen.insert(rel.in_.clone()) {
neighbor_ids.push(rel.in_.clone());
}
if seen.insert(rel.out.clone()) {
neighbor_ids.push(rel.out);
}
}
}
neighbor_ids.retain(|id| id != entity_id);
if neighbor_ids.is_empty() {
return Ok(Vec::new());
}
if limit > 0 && neighbor_ids.len() > limit {
neighbor_ids.truncate(limit);
}
let thing_ids: Vec<Thing> = neighbor_ids
.iter()
.map(|id| Thing::from((KnowledgeEntity::table_name(), id.as_str())))
.collect();
let mut neighbors_response = db
.query("SELECT * FROM type::table($table) WHERE id IN $things AND user_id = $user_id")
.bind(("table", KnowledgeEntity::table_name().to_owned()))
.bind(("things", thing_ids))
.bind(("user_id", user_id.to_owned()))
.await?;
let neighbors: Vec<KnowledgeEntity> = neighbors_response.take(0)?;
if neighbors.is_empty() {
return Ok(Vec::new());
}
let mut neighbor_map: HashMap<String, KnowledgeEntity> = neighbors
.into_iter()
.map(|entity| (entity.id.clone(), entity))
.collect();
let mut ordered = Vec::new();
for id in neighbor_ids {
if let Some(entity) = neighbor_map.remove(&id) {
ordered.push(entity);
}
if limit > 0 && ordered.len() >= limit {
break;
}
}
Ok(ordered)
}
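Usage sketch for the new signature (assumptions: an async context plus a `db` client and a stored `entity`, as in the tests in this file): fetch at most six one-hop neighbours scoped to the owning user.
let neighbours =
    find_entities_by_relationship_by_id(&db, &entity.id, "user_123", 6).await?;
assert!(neighbours.iter().all(|n| n.id != entity.id));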
#[cfg(test)]
@@ -146,7 +235,7 @@ mod tests {
// Test finding entities by multiple source_ids
let source_ids = vec![source_id1.clone(), source_id2.clone()];
let found_entities: Vec<KnowledgeEntity> =
find_entities_by_source_ids(source_ids, KnowledgeEntity::table_name().to_string(), &db)
find_entities_by_source_ids(source_ids, KnowledgeEntity::table_name(), &user_id, &db)
.await
.expect("Failed to find entities by source_ids");
@@ -177,7 +266,8 @@ mod tests {
let single_source_id = vec![source_id1.clone()];
let found_entities: Vec<KnowledgeEntity> = find_entities_by_source_ids(
single_source_id,
KnowledgeEntity::table_name().to_string(),
KnowledgeEntity::table_name(),
&user_id,
&db,
)
.await
@@ -202,7 +292,8 @@ mod tests {
let non_existent_source_id = vec!["non_existent_source".to_string()];
let found_entities: Vec<KnowledgeEntity> = find_entities_by_source_ids(
non_existent_source_id,
KnowledgeEntity::table_name().to_string(),
KnowledgeEntity::table_name(),
&user_id,
&db,
)
.await
@@ -327,11 +418,15 @@ mod tests {
.expect("Failed to store relationship 2");
// Test finding entities related to the central entity
let related_entities = find_entities_by_relationship_by_id(&db, central_entity.id.clone())
.await
.expect("Failed to find entities by relationship");
let related_entities =
find_entities_by_relationship_by_id(&db, &central_entity.id, &user_id, usize::MAX)
.await
.expect("Failed to find entities by relationship");
// Check that we found relationships
assert!(related_entities.len() > 0, "Should find related entities");
assert!(
related_entities.len() >= 2,
"Should find related entities in both directions"
);
}
}

View File

@@ -1,6 +1,10 @@
pub mod answer_retrieval;
pub mod answer_retrieval_helper;
pub mod fts;
pub mod graph;
pub mod pipeline;
pub mod reranking;
pub mod scoring;
pub mod vector;
use common::{
@@ -10,81 +14,254 @@ use common::{
types::{knowledge_entity::KnowledgeEntity, text_chunk::TextChunk},
},
};
use futures::future::{try_join, try_join_all};
use graph::{find_entities_by_relationship_by_id, find_entities_by_source_ids};
use std::collections::HashMap;
use vector::find_items_by_vector_similarity;
use reranking::RerankerLease;
use tracing::instrument;
/// Performs a comprehensive knowledge entity retrieval using multiple search strategies
/// to find the most relevant entities for a given query.
///
/// # Strategy
/// The function employs a three-pronged approach to knowledge retrieval:
/// 1. Direct vector similarity search on knowledge entities
/// 2. Text chunk similarity search with source entity lookup
/// 3. Graph relationship traversal from related entities
///
/// This combined approach ensures both semantic similarity matches and structurally
/// related content are included in the results.
///
/// # Arguments
/// * `db_client` - SurrealDB client for database operations
/// * `openai_client` - OpenAI client for vector embeddings generation
/// * `query` - The search query string to find relevant knowledge entities
/// * `user_id` - The user id of the current user
///
/// # Returns
/// * `Result<Vec<KnowledgeEntity>, AppError>` - A deduplicated vector of relevant
/// knowledge entities, or an error if the retrieval process fails
pub use pipeline::{retrieved_entities_to_json, RetrievalConfig, RetrievalTuning};
// Captures a supporting chunk plus its fused retrieval score for downstream prompts.
#[derive(Debug, Clone)]
pub struct RetrievedChunk {
pub chunk: TextChunk,
pub score: f32,
}
// Final entity representation returned to callers, enriched with ranked chunks.
#[derive(Debug, Clone)]
pub struct RetrievedEntity {
pub entity: KnowledgeEntity,
pub score: f32,
pub chunks: Vec<RetrievedChunk>,
}
// Primary orchestrator for retrieving KnowledgeEntities related to an input_text
#[instrument(skip_all, fields(user_id))]
pub async fn retrieve_entities(
db_client: &SurrealDbClient,
openai_client: &async_openai::Client<async_openai::config::OpenAIConfig>,
query: &str,
input_text: &str,
user_id: &str,
) -> Result<Vec<KnowledgeEntity>, AppError> {
let (items_from_knowledge_entity_similarity, closest_chunks) = try_join(
find_items_by_vector_similarity(
10,
query,
db_client,
"knowledge_entity",
openai_client,
user_id,
),
find_items_by_vector_similarity(5, query, db_client, "text_chunk", openai_client, user_id),
reranker: Option<RerankerLease>,
) -> Result<Vec<RetrievedEntity>, AppError> {
pipeline::run_pipeline(
db_client,
openai_client,
input_text,
user_id,
RetrievalConfig::default(),
reranker,
)
.await?;
let source_ids = closest_chunks
.iter()
.map(|chunk: &TextChunk| chunk.source_id.clone())
.collect::<Vec<String>>();
let items_from_text_chunk_similarity: Vec<KnowledgeEntity> =
find_entities_by_source_ids(source_ids, "knowledge_entity".to_string(), db_client).await?;
let items_from_relationships_futures: Vec<_> = items_from_text_chunk_similarity
.clone()
.into_iter()
.map(|entity| find_entities_by_relationship_by_id(db_client, entity.id.clone()))
.collect();
let items_from_relationships = try_join_all(items_from_relationships_futures)
.await?
.into_iter()
.flatten()
.collect::<Vec<KnowledgeEntity>>();
let entities: Vec<KnowledgeEntity> = items_from_knowledge_entity_similarity
.into_iter()
.chain(items_from_text_chunk_similarity.into_iter())
.chain(items_from_relationships.into_iter())
.fold(HashMap::new(), |mut map, entity| {
map.insert(entity.id.clone(), entity);
map
})
.into_values()
.collect();
Ok(entities)
.await
}
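A usage sketch of the orchestrator, not taken from the diff; `db` and `openai` stand in for an existing SurrealDbClient and async_openai Client, and passing `None` skips the reranking stage.
let results = retrieve_entities(&db, &openai, "rust async scheduling", "user_123", None).await?;
for entry in &results {
    println!("{} score {:.3} with {} chunks", entry.entity.name, entry.score, entry.chunks.len());
}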
#[cfg(test)]
mod tests {
use super::*;
use async_openai::Client;
use common::storage::types::{
knowledge_entity::{KnowledgeEntity, KnowledgeEntityType},
knowledge_relationship::KnowledgeRelationship,
text_chunk::TextChunk,
};
use pipeline::RetrievalConfig;
use uuid::Uuid;
fn test_embedding() -> Vec<f32> {
vec![0.9, 0.1, 0.0]
}
fn entity_embedding_high() -> Vec<f32> {
vec![0.8, 0.2, 0.0]
}
fn entity_embedding_low() -> Vec<f32> {
vec![0.1, 0.9, 0.0]
}
fn chunk_embedding_primary() -> Vec<f32> {
vec![0.85, 0.15, 0.0]
}
fn chunk_embedding_secondary() -> Vec<f32> {
vec![0.2, 0.8, 0.0]
}
async fn setup_test_db() -> SurrealDbClient {
let namespace = "test_ns";
let database = &Uuid::new_v4().to_string();
let db = SurrealDbClient::memory(namespace, database)
.await
.expect("Failed to start in-memory surrealdb");
db.apply_migrations()
.await
.expect("Failed to apply migrations");
db.query(
"BEGIN TRANSACTION;
REMOVE INDEX IF EXISTS idx_embedding_chunks ON TABLE text_chunk;
DEFINE INDEX idx_embedding_chunks ON TABLE text_chunk FIELDS embedding HNSW DIMENSION 3;
REMOVE INDEX IF EXISTS idx_embedding_entities ON TABLE knowledge_entity;
DEFINE INDEX idx_embedding_entities ON TABLE knowledge_entity FIELDS embedding HNSW DIMENSION 3;
COMMIT TRANSACTION;",
)
.await
.expect("Failed to configure indices");
db
}
#[tokio::test]
async fn test_retrieve_entities_with_embedding_basic_flow() {
let db = setup_test_db().await;
let user_id = "test_user";
let entity = KnowledgeEntity::new(
"source_1".into(),
"Rust async guide".into(),
"Detailed notes about async runtimes".into(),
KnowledgeEntityType::Document,
None,
entity_embedding_high(),
user_id.into(),
);
let chunk = TextChunk::new(
entity.source_id.clone(),
"Tokio uses cooperative scheduling for fairness.".into(),
chunk_embedding_primary(),
user_id.into(),
);
db.store_item(entity.clone())
.await
.expect("Failed to store entity");
db.store_item(chunk.clone())
.await
.expect("Failed to store chunk");
let openai_client = Client::new();
let results = pipeline::run_pipeline_with_embedding(
&db,
&openai_client,
test_embedding(),
"Rust concurrency async tasks",
user_id,
RetrievalConfig::default(),
None,
)
.await
.expect("Hybrid retrieval failed");
assert!(
!results.is_empty(),
"Expected at least one retrieval result"
);
let top = &results[0];
assert!(
top.entity.name.contains("Rust"),
"Expected Rust entity to be ranked first"
);
assert!(
!top.chunks.is_empty(),
"Expected Rust entity to include supporting chunks"
);
}
#[tokio::test]
async fn test_graph_relationship_enriches_results() {
let db = setup_test_db().await;
let user_id = "graph_user";
let primary = KnowledgeEntity::new(
"primary_source".into(),
"Async Rust patterns".into(),
"Explores async runtimes and scheduling strategies.".into(),
KnowledgeEntityType::Document,
None,
entity_embedding_high(),
user_id.into(),
);
let neighbor = KnowledgeEntity::new(
"neighbor_source".into(),
"Tokio Scheduler Deep Dive".into(),
"Details on Tokio's cooperative scheduler.".into(),
KnowledgeEntityType::Document,
None,
entity_embedding_low(),
user_id.into(),
);
db.store_item(primary.clone())
.await
.expect("Failed to store primary entity");
db.store_item(neighbor.clone())
.await
.expect("Failed to store neighbor entity");
let primary_chunk = TextChunk::new(
primary.source_id.clone(),
"Rust async tasks use Tokio's cooperative scheduler.".into(),
chunk_embedding_primary(),
user_id.into(),
);
let neighbor_chunk = TextChunk::new(
neighbor.source_id.clone(),
"Tokio's scheduler manages task fairness across executors.".into(),
chunk_embedding_secondary(),
user_id.into(),
);
db.store_item(primary_chunk)
.await
.expect("Failed to store primary chunk");
db.store_item(neighbor_chunk)
.await
.expect("Failed to store neighbor chunk");
let openai_client = Client::new();
let relationship = KnowledgeRelationship::new(
primary.id.clone(),
neighbor.id.clone(),
user_id.into(),
"relationship_source".into(),
"references".into(),
);
relationship
.store_relationship(&db)
.await
.expect("Failed to store relationship");
let results = pipeline::run_pipeline_with_embedding(
&db,
&openai_client,
test_embedding(),
"Rust concurrency async tasks",
user_id,
RetrievalConfig::default(),
None,
)
.await
.expect("Hybrid retrieval failed");
let mut neighbor_entry = None;
for entity in &results {
if entity.entity.id == neighbor.id {
neighbor_entry = Some(entity.clone());
}
}
let neighbor_entry =
neighbor_entry.expect("Graph-enriched neighbor should appear in results");
assert!(
neighbor_entry.score > 0.2,
"Graph-enriched entity should have a meaningful fused score"
);
assert!(
neighbor_entry
.chunks
.iter()
.all(|chunk| chunk.chunk.source_id == neighbor.source_id),
"Neighbor entity should surface its own supporting chunks"
);
}
}

View File

@@ -0,0 +1,67 @@
use serde::{Deserialize, Serialize};
/// Tunable parameters that govern each retrieval stage.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RetrievalTuning {
pub entity_vector_take: usize,
pub chunk_vector_take: usize,
pub entity_fts_take: usize,
pub chunk_fts_take: usize,
pub score_threshold: f32,
pub fallback_min_results: usize,
pub token_budget_estimate: usize,
pub avg_chars_per_token: usize,
pub max_chunks_per_entity: usize,
pub graph_traversal_seed_limit: usize,
pub graph_neighbor_limit: usize,
pub graph_score_decay: f32,
pub graph_seed_min_score: f32,
pub graph_vector_inheritance: f32,
pub rerank_blend_weight: f32,
pub rerank_scores_only: bool,
pub rerank_keep_top: usize,
}
impl Default for RetrievalTuning {
fn default() -> Self {
Self {
entity_vector_take: 15,
chunk_vector_take: 20,
entity_fts_take: 10,
chunk_fts_take: 20,
score_threshold: 0.35,
fallback_min_results: 10,
token_budget_estimate: 2800,
avg_chars_per_token: 4,
max_chunks_per_entity: 4,
graph_traversal_seed_limit: 5,
graph_neighbor_limit: 6,
graph_score_decay: 0.75,
graph_seed_min_score: 0.4,
graph_vector_inheritance: 0.6,
rerank_blend_weight: 0.65,
rerank_scores_only: false,
rerank_keep_top: 8,
}
}
}
/// Wrapper containing tuning plus future flags for per-request overrides.
#[derive(Debug, Clone)]
pub struct RetrievalConfig {
pub tuning: RetrievalTuning,
}
impl RetrievalConfig {
pub fn new(tuning: RetrievalTuning) -> Self {
Self { tuning }
}
}
impl Default for RetrievalConfig {
fn default() -> Self {
Self {
tuning: RetrievalTuning::default(),
}
}
}
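A small sketch of overriding a few knobs while keeping the defaults above; the field names are the ones defined in this file.
let config = RetrievalConfig::new(RetrievalTuning {
    entity_vector_take: 8,
    chunk_vector_take: 10,
    rerank_keep_top: 5,
    ..RetrievalTuning::default()
});
assert_eq!(config.tuning.rerank_keep_top, 5);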

View File

@@ -0,0 +1,106 @@
mod config;
mod stages;
mod state;
pub use config::{RetrievalConfig, RetrievalTuning};
use crate::{reranking::RerankerLease, RetrievedEntity};
use async_openai::Client;
use common::{error::AppError, storage::db::SurrealDbClient};
use tracing::info;
/// Drives the retrieval pipeline from embedding through final assembly.
pub async fn run_pipeline(
db_client: &SurrealDbClient,
openai_client: &Client<async_openai::config::OpenAIConfig>,
input_text: &str,
user_id: &str,
config: RetrievalConfig,
reranker: Option<RerankerLease>,
) -> Result<Vec<RetrievedEntity>, AppError> {
let machine = state::ready();
let input_chars = input_text.chars().count();
let input_preview: String = input_text.chars().take(120).collect();
let input_preview_clean = input_preview.replace('\n', " ");
let preview_len = input_preview_clean.chars().count();
info!(
%user_id,
input_chars,
preview_truncated = input_chars > preview_len,
preview = %input_preview_clean,
"Starting hybrid retrieval pipeline"
);
let mut ctx = stages::PipelineContext::new(
db_client,
openai_client,
input_text.to_owned(),
user_id.to_owned(),
config,
reranker,
);
let machine = stages::embed(machine, &mut ctx).await?;
let machine = stages::collect_candidates(machine, &mut ctx).await?;
let machine = stages::expand_graph(machine, &mut ctx).await?;
let machine = stages::attach_chunks(machine, &mut ctx).await?;
let machine = stages::rerank(machine, &mut ctx).await?;
let results = stages::assemble(machine, &mut ctx)?;
Ok(results)
}
#[cfg(test)]
pub async fn run_pipeline_with_embedding(
db_client: &SurrealDbClient,
openai_client: &Client<async_openai::config::OpenAIConfig>,
query_embedding: Vec<f32>,
input_text: &str,
user_id: &str,
config: RetrievalConfig,
reranker: Option<RerankerLease>,
) -> Result<Vec<RetrievedEntity>, AppError> {
let machine = state::ready();
let mut ctx = stages::PipelineContext::with_embedding(
db_client,
openai_client,
query_embedding,
input_text.to_owned(),
user_id.to_owned(),
config,
reranker,
);
let machine = stages::embed(machine, &mut ctx).await?;
let machine = stages::collect_candidates(machine, &mut ctx).await?;
let machine = stages::expand_graph(machine, &mut ctx).await?;
let machine = stages::attach_chunks(machine, &mut ctx).await?;
let machine = stages::rerank(machine, &mut ctx).await?;
let results = stages::assemble(machine, &mut ctx)?;
Ok(results)
}
/// Helper exposed for tests to convert retrieved entities into downstream prompt JSON.
pub fn retrieved_entities_to_json(entities: &[RetrievedEntity]) -> serde_json::Value {
serde_json::json!(entities
.iter()
.map(|entry| {
serde_json::json!({
"KnowledgeEntity": {
"id": entry.entity.id,
"name": entry.entity.name,
"description": entry.entity.description,
"score": round_score(entry.score),
"chunks": entry.chunks.iter().map(|chunk| {
serde_json::json!({
"score": round_score(chunk.score),
"content": chunk.chunk.chunk
})
}).collect::<Vec<_>>()
}
})
})
.collect::<Vec<_>>())
}
fn round_score(value: f32) -> f64 {
(f64::from(value) * 1000.0).round() / 1000.0
}
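Illustration only: given `results` from the pipeline, the JSON handed to the prompt is an array keyed by "KnowledgeEntity", carrying the rounded score and the selected chunks (values below are invented).
let prompt_json = retrieved_entities_to_json(&results);
// e.g. [{"KnowledgeEntity": {"id": "…", "name": "…", "description": "…",
//        "score": 0.812, "chunks": [{"score": 0.734, "content": "…"}]}}]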

View File

@@ -0,0 +1,769 @@
use async_openai::Client;
use common::{
error::AppError,
storage::{
db::SurrealDbClient,
types::{knowledge_entity::KnowledgeEntity, text_chunk::TextChunk, StoredObject},
},
utils::embedding::generate_embedding,
};
use fastembed::RerankResult;
use futures::{stream::FuturesUnordered, StreamExt};
use state_machines::core::GuardError;
use std::collections::{HashMap, HashSet};
use tracing::{debug, instrument, warn};
use crate::{
fts::find_items_by_fts,
graph::{find_entities_by_relationship_by_id, find_entities_by_source_ids},
reranking::RerankerLease,
scoring::{
clamp_unit, fuse_scores, merge_scored_by_id, min_max_normalize, sort_by_fused_desc,
FusionWeights, Scored,
},
vector::find_items_by_vector_similarity_with_embedding,
RetrievedChunk, RetrievedEntity,
};
use super::{
config::RetrievalConfig,
state::{
CandidatesLoaded, ChunksAttached, Embedded, GraphExpanded, HybridRetrievalMachine, Ready,
Reranked,
},
};
pub struct PipelineContext<'a> {
pub db_client: &'a SurrealDbClient,
pub openai_client: &'a Client<async_openai::config::OpenAIConfig>,
pub input_text: String,
pub user_id: String,
pub config: RetrievalConfig,
pub query_embedding: Option<Vec<f32>>,
pub entity_candidates: HashMap<String, Scored<KnowledgeEntity>>,
pub chunk_candidates: HashMap<String, Scored<TextChunk>>,
pub filtered_entities: Vec<Scored<KnowledgeEntity>>,
pub chunk_values: Vec<Scored<TextChunk>>,
pub reranker: Option<RerankerLease>,
}
impl<'a> PipelineContext<'a> {
pub fn new(
db_client: &'a SurrealDbClient,
openai_client: &'a Client<async_openai::config::OpenAIConfig>,
input_text: String,
user_id: String,
config: RetrievalConfig,
reranker: Option<RerankerLease>,
) -> Self {
Self {
db_client,
openai_client,
input_text,
user_id,
config,
query_embedding: None,
entity_candidates: HashMap::new(),
chunk_candidates: HashMap::new(),
filtered_entities: Vec::new(),
chunk_values: Vec::new(),
reranker,
}
}
#[cfg(test)]
pub fn with_embedding(
db_client: &'a SurrealDbClient,
openai_client: &'a Client<async_openai::config::OpenAIConfig>,
query_embedding: Vec<f32>,
input_text: String,
user_id: String,
config: RetrievalConfig,
reranker: Option<RerankerLease>,
) -> Self {
let mut ctx = Self::new(
db_client,
openai_client,
input_text,
user_id,
config,
reranker,
);
ctx.query_embedding = Some(query_embedding);
ctx
}
fn ensure_embedding(&self) -> Result<&Vec<f32>, AppError> {
self.query_embedding.as_ref().ok_or_else(|| {
AppError::InternalError(
"query embedding missing before candidate collection".to_string(),
)
})
}
}
#[instrument(level = "trace", skip_all)]
pub async fn embed(
machine: HybridRetrievalMachine<(), Ready>,
ctx: &mut PipelineContext<'_>,
) -> Result<HybridRetrievalMachine<(), Embedded>, AppError> {
let embedding_cached = ctx.query_embedding.is_some();
if embedding_cached {
debug!("Reusing cached query embedding for hybrid retrieval");
} else {
debug!("Generating query embedding for hybrid retrieval");
let embedding =
generate_embedding(ctx.openai_client, &ctx.input_text, ctx.db_client).await?;
ctx.query_embedding = Some(embedding);
}
machine
.embed()
.map_err(|(_, guard)| map_guard_error("embed", guard))
}
#[instrument(level = "trace", skip_all)]
pub async fn collect_candidates(
machine: HybridRetrievalMachine<(), Embedded>,
ctx: &mut PipelineContext<'_>,
) -> Result<HybridRetrievalMachine<(), CandidatesLoaded>, AppError> {
debug!("Collecting initial candidates via vector and FTS search");
let embedding = ctx.ensure_embedding()?.clone();
let tuning = &ctx.config.tuning;
let weights = FusionWeights::default();
let (vector_entities, vector_chunks, mut fts_entities, mut fts_chunks) = tokio::try_join!(
find_items_by_vector_similarity_with_embedding(
tuning.entity_vector_take,
embedding.clone(),
ctx.db_client,
"knowledge_entity",
&ctx.user_id,
),
find_items_by_vector_similarity_with_embedding(
tuning.chunk_vector_take,
embedding,
ctx.db_client,
"text_chunk",
&ctx.user_id,
),
find_items_by_fts(
tuning.entity_fts_take,
&ctx.input_text,
ctx.db_client,
"knowledge_entity",
&ctx.user_id,
),
find_items_by_fts(
tuning.chunk_fts_take,
&ctx.input_text,
ctx.db_client,
"text_chunk",
&ctx.user_id
),
)?;
debug!(
vector_entities = vector_entities.len(),
vector_chunks = vector_chunks.len(),
fts_entities = fts_entities.len(),
fts_chunks = fts_chunks.len(),
"Hybrid retrieval initial candidate counts"
);
normalize_fts_scores(&mut fts_entities);
normalize_fts_scores(&mut fts_chunks);
merge_scored_by_id(&mut ctx.entity_candidates, vector_entities);
merge_scored_by_id(&mut ctx.entity_candidates, fts_entities);
merge_scored_by_id(&mut ctx.chunk_candidates, vector_chunks);
merge_scored_by_id(&mut ctx.chunk_candidates, fts_chunks);
apply_fusion(&mut ctx.entity_candidates, weights);
apply_fusion(&mut ctx.chunk_candidates, weights);
machine
.collect_candidates()
.map_err(|(_, guard)| map_guard_error("collect_candidates", guard))
}
#[instrument(level = "trace", skip_all)]
pub async fn expand_graph(
machine: HybridRetrievalMachine<(), CandidatesLoaded>,
ctx: &mut PipelineContext<'_>,
) -> Result<HybridRetrievalMachine<(), GraphExpanded>, AppError> {
debug!("Expanding candidates using graph relationships");
let tuning = &ctx.config.tuning;
let weights = FusionWeights::default();
if ctx.entity_candidates.is_empty() {
return machine
.expand_graph()
.map_err(|(_, guard)| map_guard_error("expand_graph", guard));
}
let graph_seeds = seeds_from_candidates(
&ctx.entity_candidates,
tuning.graph_seed_min_score,
tuning.graph_traversal_seed_limit,
);
if graph_seeds.is_empty() {
return machine
.expand_graph()
.map_err(|(_, guard)| map_guard_error("expand_graph", guard));
}
let mut futures = FuturesUnordered::new();
for seed in graph_seeds {
let db = ctx.db_client;
let user = ctx.user_id.clone();
futures.push(async move {
let neighbors = find_entities_by_relationship_by_id(
db,
&seed.id,
&user,
tuning.graph_neighbor_limit,
)
.await;
(seed, neighbors)
});
}
while let Some((seed, neighbors_result)) = futures.next().await {
let neighbors = neighbors_result.map_err(AppError::from)?;
if neighbors.is_empty() {
continue;
}
for neighbor in neighbors {
if neighbor.id == seed.id {
continue;
}
let graph_score = clamp_unit(seed.fused * tuning.graph_score_decay);
let entry = ctx
.entity_candidates
.entry(neighbor.id.clone())
.or_insert_with(|| Scored::new(neighbor.clone()));
entry.item = neighbor;
let inherited_vector = clamp_unit(graph_score * tuning.graph_vector_inheritance);
let vector_existing = entry.scores.vector.unwrap_or(0.0);
if inherited_vector > vector_existing {
entry.scores.vector = Some(inherited_vector);
}
let existing_graph = entry.scores.graph.unwrap_or(f32::MIN);
if graph_score > existing_graph || entry.scores.graph.is_none() {
entry.scores.graph = Some(graph_score);
}
let fused = fuse_scores(&entry.scores, weights);
entry.update_fused(fused);
}
}
machine
.expand_graph()
.map_err(|(_, guard)| map_guard_error("expand_graph", guard))
}
#[instrument(level = "trace", skip_all)]
pub async fn attach_chunks(
machine: HybridRetrievalMachine<(), GraphExpanded>,
ctx: &mut PipelineContext<'_>,
) -> Result<HybridRetrievalMachine<(), ChunksAttached>, AppError> {
debug!("Attaching chunks to surviving entities");
let tuning = &ctx.config.tuning;
let weights = FusionWeights::default();
let chunk_by_source = group_chunks_by_source(&ctx.chunk_candidates);
backfill_entities_from_chunks(
&mut ctx.entity_candidates,
&chunk_by_source,
ctx.db_client,
&ctx.user_id,
weights,
)
.await?;
boost_entities_with_chunks(&mut ctx.entity_candidates, &chunk_by_source, weights);
let mut entity_results: Vec<Scored<KnowledgeEntity>> =
ctx.entity_candidates.values().cloned().collect();
sort_by_fused_desc(&mut entity_results);
let mut filtered_entities: Vec<Scored<KnowledgeEntity>> = entity_results
.iter()
.filter(|candidate| candidate.fused >= tuning.score_threshold)
.cloned()
.collect();
if filtered_entities.len() < tuning.fallback_min_results {
filtered_entities = entity_results
.into_iter()
.take(tuning.fallback_min_results)
.collect();
}
ctx.filtered_entities = filtered_entities;
let mut chunk_results: Vec<Scored<TextChunk>> =
ctx.chunk_candidates.values().cloned().collect();
sort_by_fused_desc(&mut chunk_results);
let mut chunk_by_id: HashMap<String, Scored<TextChunk>> = HashMap::new();
for chunk in chunk_results {
chunk_by_id.insert(chunk.item.id.clone(), chunk);
}
enrich_chunks_from_entities(
&mut chunk_by_id,
&ctx.filtered_entities,
ctx.db_client,
&ctx.user_id,
weights,
)
.await?;
let mut chunk_values: Vec<Scored<TextChunk>> = chunk_by_id.into_values().collect();
sort_by_fused_desc(&mut chunk_values);
ctx.chunk_values = chunk_values;
machine
.attach_chunks()
.map_err(|(_, guard)| map_guard_error("attach_chunks", guard))
}
#[instrument(level = "trace", skip_all)]
pub async fn rerank(
machine: HybridRetrievalMachine<(), ChunksAttached>,
ctx: &mut PipelineContext<'_>,
) -> Result<HybridRetrievalMachine<(), Reranked>, AppError> {
let mut applied = false;
if let Some(reranker) = ctx.reranker.as_ref() {
if ctx.filtered_entities.len() > 1 {
let documents = build_rerank_documents(ctx, ctx.config.tuning.max_chunks_per_entity);
if documents.len() > 1 {
match reranker.rerank(&ctx.input_text, documents).await {
Ok(results) if !results.is_empty() => {
apply_rerank_results(ctx, results);
applied = true;
}
Ok(_) => {
debug!("Reranker returned no results; retaining original ordering");
}
Err(err) => {
warn!(
error = %err,
"Reranking failed; continuing with original ordering"
);
}
}
} else {
debug!(
document_count = documents.len(),
"Skipping reranking stage; insufficient document context"
);
}
} else {
debug!("Skipping reranking stage; fewer than two entities available");
}
} else {
debug!("No reranker lease provided; skipping reranking stage");
}
if applied {
debug!("Applied reranking adjustments to candidate ordering");
}
machine
.rerank()
.map_err(|(_, guard)| map_guard_error("rerank", guard))
}
#[instrument(level = "trace", skip_all)]
pub fn assemble(
machine: HybridRetrievalMachine<(), Reranked>,
ctx: &mut PipelineContext<'_>,
) -> Result<Vec<RetrievedEntity>, AppError> {
debug!("Assembling final retrieved entities");
let tuning = &ctx.config.tuning;
let mut chunk_by_source: HashMap<String, Vec<Scored<TextChunk>>> = HashMap::new();
for chunk in ctx.chunk_values.drain(..) {
chunk_by_source
.entry(chunk.item.source_id.clone())
.or_default()
.push(chunk);
}
for chunk_list in chunk_by_source.values_mut() {
sort_by_fused_desc(chunk_list);
}
let mut token_budget_remaining = tuning.token_budget_estimate;
let mut results = Vec::new();
for entity in &ctx.filtered_entities {
let mut selected_chunks = Vec::new();
if let Some(candidates) = chunk_by_source.get_mut(&entity.item.source_id) {
let mut per_entity_count = 0;
candidates.sort_by(|a, b| {
b.fused
.partial_cmp(&a.fused)
.unwrap_or(std::cmp::Ordering::Equal)
});
for candidate in candidates.iter() {
if per_entity_count >= tuning.max_chunks_per_entity {
break;
}
let estimated_tokens =
estimate_tokens(&candidate.item.chunk, tuning.avg_chars_per_token);
if estimated_tokens > token_budget_remaining {
continue;
}
token_budget_remaining = token_budget_remaining.saturating_sub(estimated_tokens);
per_entity_count += 1;
selected_chunks.push(RetrievedChunk {
chunk: candidate.item.clone(),
score: candidate.fused,
});
}
}
results.push(RetrievedEntity {
entity: entity.item.clone(),
score: entity.fused,
chunks: selected_chunks,
});
if token_budget_remaining == 0 {
break;
}
}
machine
.assemble()
.map_err(|(_, guard)| map_guard_error("assemble", guard))?;
Ok(results)
}
fn map_guard_error(stage: &'static str, err: GuardError) -> AppError {
AppError::InternalError(format!(
"state machine guard '{stage}' failed: guard={}, event={}, kind={:?}",
err.guard, err.event, err.kind
))
}
fn normalize_fts_scores<T>(results: &mut [Scored<T>]) {
let raw_scores: Vec<f32> = results
.iter()
.map(|candidate| candidate.scores.fts.unwrap_or(0.0))
.collect();
let normalized = min_max_normalize(&raw_scores);
for (candidate, normalized_score) in results.iter_mut().zip(normalized.into_iter()) {
candidate.scores.fts = Some(normalized_score);
candidate.update_fused(0.0);
}
}
fn apply_fusion<T>(candidates: &mut HashMap<String, Scored<T>>, weights: FusionWeights)
where
T: StoredObject,
{
for candidate in candidates.values_mut() {
let fused = fuse_scores(&candidate.scores, weights);
candidate.update_fused(fused);
}
}
fn group_chunks_by_source(
chunks: &HashMap<String, Scored<TextChunk>>,
) -> HashMap<String, Vec<Scored<TextChunk>>> {
let mut by_source: HashMap<String, Vec<Scored<TextChunk>>> = HashMap::new();
for chunk in chunks.values() {
by_source
.entry(chunk.item.source_id.clone())
.or_default()
.push(chunk.clone());
}
by_source
}
async fn backfill_entities_from_chunks(
entity_candidates: &mut HashMap<String, Scored<KnowledgeEntity>>,
chunk_by_source: &HashMap<String, Vec<Scored<TextChunk>>>,
db_client: &SurrealDbClient,
user_id: &str,
weights: FusionWeights,
) -> Result<(), AppError> {
let mut missing_sources = Vec::new();
for source_id in chunk_by_source.keys() {
if !entity_candidates
.values()
.any(|entity| entity.item.source_id == *source_id)
{
missing_sources.push(source_id.clone());
}
}
if missing_sources.is_empty() {
return Ok(());
}
let related_entities: Vec<KnowledgeEntity> = find_entities_by_source_ids(
missing_sources.clone(),
"knowledge_entity",
user_id,
db_client,
)
.await
.unwrap_or_default();
if related_entities.is_empty() {
warn!("expected related entities for missing chunk sources, but none were found");
}
for entity in related_entities {
if let Some(chunks) = chunk_by_source.get(&entity.source_id) {
let best_chunk_score = chunks
.iter()
.map(|chunk| chunk.fused)
.fold(0.0f32, f32::max);
let mut scored = Scored::new(entity.clone()).with_vector_score(best_chunk_score);
let fused = fuse_scores(&scored.scores, weights);
scored.update_fused(fused);
entity_candidates.insert(entity.id.clone(), scored);
}
}
Ok(())
}
fn boost_entities_with_chunks(
entity_candidates: &mut HashMap<String, Scored<KnowledgeEntity>>,
chunk_by_source: &HashMap<String, Vec<Scored<TextChunk>>>,
weights: FusionWeights,
) {
for entity in entity_candidates.values_mut() {
if let Some(chunks) = chunk_by_source.get(&entity.item.source_id) {
let best_chunk_score = chunks
.iter()
.map(|chunk| chunk.fused)
.fold(0.0f32, f32::max);
if best_chunk_score > 0.0 {
let boosted = entity.scores.vector.unwrap_or(0.0).max(best_chunk_score);
entity.scores.vector = Some(boosted);
let fused = fuse_scores(&entity.scores, weights);
entity.update_fused(fused);
}
}
}
}
async fn enrich_chunks_from_entities(
chunk_candidates: &mut HashMap<String, Scored<TextChunk>>,
entities: &[Scored<KnowledgeEntity>],
db_client: &SurrealDbClient,
user_id: &str,
weights: FusionWeights,
) -> Result<(), AppError> {
let mut source_ids: HashSet<String> = HashSet::new();
for entity in entities {
source_ids.insert(entity.item.source_id.clone());
}
if source_ids.is_empty() {
return Ok(());
}
let chunks = find_entities_by_source_ids::<TextChunk>(
source_ids.into_iter().collect(),
"text_chunk",
user_id,
db_client,
)
.await?;
let mut entity_score_lookup: HashMap<String, f32> = HashMap::new();
for entity in entities {
entity_score_lookup.insert(entity.item.source_id.clone(), entity.fused);
}
for chunk in chunks {
let entry = chunk_candidates
.entry(chunk.id.clone())
.or_insert_with(|| Scored::new(chunk.clone()).with_vector_score(0.0));
let entity_score = entity_score_lookup
.get(&chunk.source_id)
.copied()
.unwrap_or(0.0);
entry.scores.vector = Some(entry.scores.vector.unwrap_or(0.0).max(entity_score * 0.8));
let fused = fuse_scores(&entry.scores, weights);
entry.update_fused(fused);
entry.item = chunk;
}
Ok(())
}
fn build_rerank_documents(ctx: &PipelineContext<'_>, max_chunks_per_entity: usize) -> Vec<String> {
if ctx.filtered_entities.is_empty() {
return Vec::new();
}
let mut chunk_by_source: HashMap<&str, Vec<&Scored<TextChunk>>> = HashMap::new();
for chunk in &ctx.chunk_values {
chunk_by_source
.entry(chunk.item.source_id.as_str())
.or_default()
.push(chunk);
}
ctx.filtered_entities
.iter()
.map(|entity| {
let mut doc = format!(
"Name: {}\nType: {:?}\nDescription: {}\n",
entity.item.name, entity.item.entity_type, entity.item.description
);
if let Some(chunks) = chunk_by_source.get(entity.item.source_id.as_str()) {
let mut chunk_refs = chunks.clone();
chunk_refs.sort_by(|a, b| {
b.fused
.partial_cmp(&a.fused)
.unwrap_or(std::cmp::Ordering::Equal)
});
let mut header_added = false;
for chunk in chunk_refs.into_iter().take(max_chunks_per_entity.max(1)) {
let snippet = chunk.item.chunk.trim();
if snippet.is_empty() {
continue;
}
if !header_added {
doc.push_str("Chunks:\n");
header_added = true;
}
doc.push_str("- ");
doc.push_str(snippet);
doc.push('\n');
}
}
doc
})
.collect()
}
fn apply_rerank_results(ctx: &mut PipelineContext<'_>, results: Vec<RerankResult>) {
if results.is_empty() || ctx.filtered_entities.is_empty() {
return;
}
let mut remaining: Vec<Option<Scored<KnowledgeEntity>>> =
std::mem::take(&mut ctx.filtered_entities)
.into_iter()
.map(Some)
.collect();
let raw_scores: Vec<f32> = results.iter().map(|r| r.score).collect();
let normalized_scores = min_max_normalize(&raw_scores);
let use_only = ctx.config.tuning.rerank_scores_only;
let blend = if use_only {
1.0
} else {
clamp_unit(ctx.config.tuning.rerank_blend_weight)
};
let mut reranked: Vec<Scored<KnowledgeEntity>> = Vec::with_capacity(remaining.len());
for (result, normalized) in results.into_iter().zip(normalized_scores.into_iter()) {
if let Some(slot) = remaining.get_mut(result.index) {
if let Some(mut candidate) = slot.take() {
let original = candidate.fused;
let blended = if use_only {
clamp_unit(normalized)
} else {
clamp_unit(original * (1.0 - blend) + normalized * blend)
};
candidate.update_fused(blended);
reranked.push(candidate);
}
} else {
warn!(
result_index = result.index,
"Reranker returned out-of-range index; skipping"
);
}
if reranked.len() == remaining.len() {
break;
}
}
for slot in remaining.into_iter() {
if let Some(candidate) = slot {
reranked.push(candidate);
}
}
ctx.filtered_entities = reranked;
let keep_top = ctx.config.tuning.rerank_keep_top;
if keep_top > 0 && ctx.filtered_entities.len() > keep_top {
ctx.filtered_entities.truncate(keep_top);
}
}
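Worked example of the blend above (illustrative numbers; the default rerank_blend_weight is 0.65):
let original = 0.40_f32;   // fused score before reranking
let normalized = 0.90_f32; // min-max normalized reranker score
let blend = 0.65_f32;      // rerank_blend_weight
let blended = original * (1.0 - blend) + normalized * blend;
assert!((blended - 0.725).abs() < 1e-6); // with rerank_scores_only the 0.90 is used as-is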
fn estimate_tokens(text: &str, avg_chars_per_token: usize) -> usize {
let chars = text.chars().count().max(1);
(chars / avg_chars_per_token).max(1)
}
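Quick check of the estimate (the default avg_chars_per_token is 4); this would hold in a unit test in this module:
assert_eq!(estimate_tokens(&"x".repeat(120), 4), 30);
assert_eq!(estimate_tokens("", 4), 1); // empty text still costs one token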
#[derive(Clone)]
struct GraphSeed {
id: String,
fused: f32,
}
fn seeds_from_candidates(
entity_candidates: &HashMap<String, Scored<KnowledgeEntity>>,
min_score: f32,
limit: usize,
) -> Vec<GraphSeed> {
let mut seeds: Vec<GraphSeed> = entity_candidates
.values()
.filter(|entity| entity.fused >= min_score)
.map(|entity| GraphSeed {
id: entity.item.id.clone(),
fused: entity.fused,
})
.collect();
seeds.sort_by(|a, b| {
b.fused
.partial_cmp(&a.fused)
.unwrap_or(std::cmp::Ordering::Equal)
});
if seeds.len() > limit {
seeds.truncate(limit);
}
seeds
}

View File

@@ -0,0 +1,27 @@
use state_machines::state_machine;
state_machine! {
name: HybridRetrievalMachine,
state: HybridRetrievalState,
initial: Ready,
states: [Ready, Embedded, CandidatesLoaded, GraphExpanded, ChunksAttached, Reranked, Completed, Failed],
events {
embed { transition: { from: Ready, to: Embedded } }
collect_candidates { transition: { from: Embedded, to: CandidatesLoaded } }
expand_graph { transition: { from: CandidatesLoaded, to: GraphExpanded } }
attach_chunks { transition: { from: GraphExpanded, to: ChunksAttached } }
rerank { transition: { from: ChunksAttached, to: Reranked } }
assemble { transition: { from: Reranked, to: Completed } }
abort {
transition: { from: Ready, to: Failed }
transition: { from: CandidatesLoaded, to: Failed }
transition: { from: GraphExpanded, to: Failed }
transition: { from: ChunksAttached, to: Failed }
transition: { from: Reranked, to: Failed }
}
}
}
pub fn ready() -> HybridRetrievalMachine<(), Ready> {
HybridRetrievalMachine::new(())
}
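A sketch of the happy path the machine enforces (it mirrors run_pipeline); events have to be fired in the declared order.
let machine = ready();
let machine = machine.embed().ok().expect("Ready -> Embedded");
let _machine = machine.collect_candidates().ok().expect("Embedded -> CandidatesLoaded");
// rerank is only declared from ChunksAttached, so it cannot be fired yet; abort is
// declared from Ready, CandidatesLoaded, GraphExpanded, ChunksAttached and Reranked.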

View File

@@ -0,0 +1,170 @@
use std::{
env, fs,
path::{Path, PathBuf},
sync::{
atomic::{AtomicUsize, Ordering},
Arc,
},
thread::available_parallelism,
};
use common::{error::AppError, utils::config::AppConfig};
use fastembed::{RerankInitOptions, RerankResult, TextRerank};
use tokio::sync::{Mutex, OwnedSemaphorePermit, Semaphore};
use tracing::debug;
static NEXT_ENGINE: AtomicUsize = AtomicUsize::new(0);
fn pick_engine_index(pool_len: usize) -> usize {
let n = NEXT_ENGINE.fetch_add(1, Ordering::Relaxed);
n % pool_len
}
pub struct RerankerPool {
engines: Vec<Arc<Mutex<TextRerank>>>,
semaphore: Arc<Semaphore>,
}
impl RerankerPool {
/// Build the pool at startup.
/// `pool_size` controls max parallel reranks.
pub fn new(pool_size: usize) -> Result<Arc<Self>, AppError> {
Self::new_with_options(pool_size, RerankInitOptions::default())
}
fn new_with_options(
pool_size: usize,
init_options: RerankInitOptions,
) -> Result<Arc<Self>, AppError> {
if pool_size == 0 {
return Err(AppError::Validation(
"RERANKING_POOL_SIZE must be greater than zero".to_string(),
));
}
fs::create_dir_all(&init_options.cache_dir)?;
let mut engines = Vec::with_capacity(pool_size);
for x in 0..pool_size {
debug!("Creating reranking engine: {x}");
let model = TextRerank::try_new(init_options.clone())
.map_err(|e| AppError::InternalError(e.to_string()))?;
engines.push(Arc::new(Mutex::new(model)));
}
Ok(Arc::new(Self {
engines,
semaphore: Arc::new(Semaphore::new(pool_size)),
}))
}
/// Initialize a pool using application configuration.
pub fn maybe_from_config(config: &AppConfig) -> Result<Option<Arc<Self>>, AppError> {
if !config.reranking_enabled {
return Ok(None);
}
let pool_size = config.reranking_pool_size.unwrap_or_else(default_pool_size);
let init_options = build_rerank_init_options(config)?;
Self::new_with_options(pool_size, init_options).map(Some)
}
/// Check out capacity + pick an engine.
/// This returns a lease that can perform rerank().
pub async fn checkout(self: &Arc<Self>) -> RerankerLease {
// Acquire a permit. This enforces backpressure.
let permit = self
.semaphore
.clone()
.acquire_owned()
.await
.expect("semaphore closed");
// Pick an engine.
// This is naive: just pick based on a simple modulo counter.
// We use an atomic counter to avoid always choosing index 0.
let idx = pick_engine_index(self.engines.len());
let engine = self.engines[idx].clone();
RerankerLease {
_permit: permit,
engine,
}
}
}
fn default_pool_size() -> usize {
available_parallelism()
.map(|value| value.get().min(2))
.unwrap_or(2)
.max(1)
}
fn is_truthy(value: &str) -> bool {
matches!(
value.trim().to_ascii_lowercase().as_str(),
"1" | "true" | "yes" | "on"
)
}
fn build_rerank_init_options(config: &AppConfig) -> Result<RerankInitOptions, AppError> {
let mut options = RerankInitOptions::default();
let cache_dir = config
.fastembed_cache_dir
.as_ref()
.map(PathBuf::from)
.or_else(|| env::var("RERANKING_CACHE_DIR").ok().map(PathBuf::from))
.or_else(|| env::var("FASTEMBED_CACHE_DIR").ok().map(PathBuf::from))
.unwrap_or_else(|| {
Path::new(&config.data_dir)
.join("fastembed")
.join("reranker")
});
fs::create_dir_all(&cache_dir)?;
options.cache_dir = cache_dir;
let show_progress = config
.fastembed_show_download_progress
.or_else(|| env_bool("RERANKING_SHOW_DOWNLOAD_PROGRESS"))
.or_else(|| env_bool("FASTEMBED_SHOW_DOWNLOAD_PROGRESS"))
.unwrap_or(true);
options.show_download_progress = show_progress;
if let Some(max_length) = config.fastembed_max_length.or_else(|| {
env::var("RERANKING_MAX_LENGTH")
.ok()
.and_then(|value| value.parse().ok())
}) {
options.max_length = max_length;
}
Ok(options)
}
fn env_bool(key: &str) -> Option<bool> {
env::var(key).ok().map(|value| is_truthy(&value))
}
/// Active lease on a single TextRerank instance.
pub struct RerankerLease {
// When this drops the semaphore permit is released.
_permit: OwnedSemaphorePermit,
engine: Arc<Mutex<TextRerank>>,
}
impl RerankerLease {
pub async fn rerank(
&self,
query: &str,
documents: Vec<String>,
) -> Result<Vec<RerankResult>, AppError> {
// Lock this specific engine so we get &mut TextRerank
let mut guard = self.engine.lock().await;
guard
.rerank(query.to_owned(), documents, false, None)
.map_err(|e| AppError::InternalError(e.to_string()))
}
}
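A rough usage sketch for the pool above (not part of the diff): a caller builds the pool once at startup and checks out a lease per request. The helper name is illustrative; the types come from the imports at the top of this file.
// Hypothetical call site. `checkout()` waits on the semaphore, so at most
// `pool_size` reranks run concurrently; the permit is released when the
// lease is dropped at the end of the function.
async fn rerank_candidates(
    pool: &std::sync::Arc<RerankerPool>,
    query: &str,
    docs: Vec<String>,
) -> Result<Vec<RerankResult>, AppError> {
    let lease = pool.checkout().await;
    lease.rerank(query, docs).await
}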


@@ -0,0 +1,183 @@
use std::cmp::Ordering;
use common::storage::types::StoredObject;
/// Holds optional subscores gathered from different retrieval signals.
#[derive(Debug, Clone, Copy, Default)]
pub struct Scores {
pub fts: Option<f32>,
pub vector: Option<f32>,
pub graph: Option<f32>,
}
/// Generic wrapper combining an item with its accumulated retrieval scores.
#[derive(Debug, Clone)]
pub struct Scored<T> {
pub item: T,
pub scores: Scores,
pub fused: f32,
}
impl<T> Scored<T> {
pub fn new(item: T) -> Self {
Self {
item,
scores: Scores::default(),
fused: 0.0,
}
}
pub const fn with_vector_score(mut self, score: f32) -> Self {
self.scores.vector = Some(score);
self
}
pub const fn with_fts_score(mut self, score: f32) -> Self {
self.scores.fts = Some(score);
self
}
pub const fn with_graph_score(mut self, score: f32) -> Self {
self.scores.graph = Some(score);
self
}
pub const fn update_fused(&mut self, fused: f32) {
self.fused = fused;
}
}
/// Weights used for linear score fusion.
#[derive(Debug, Clone, Copy)]
pub struct FusionWeights {
pub vector: f32,
pub fts: f32,
pub graph: f32,
pub multi_bonus: f32,
}
impl Default for FusionWeights {
fn default() -> Self {
Self {
vector: 0.5,
fts: 0.3,
graph: 0.2,
multi_bonus: 0.02,
}
}
}
pub const fn clamp_unit(value: f32) -> f32 {
value.clamp(0.0, 1.0)
}
pub fn distance_to_similarity(distance: f32) -> f32 {
if !distance.is_finite() {
return 0.0;
}
clamp_unit(1.0 / (1.0 + distance.max(0.0)))
}
pub fn min_max_normalize(scores: &[f32]) -> Vec<f32> {
if scores.is_empty() {
return Vec::new();
}
let mut min = f32::MAX;
let mut max = f32::MIN;
for s in scores {
if !s.is_finite() {
continue;
}
if *s < min {
min = *s;
}
if *s > max {
max = *s;
}
}
if !min.is_finite() || !max.is_finite() {
return scores.iter().map(|_| 0.0).collect();
}
if (max - min).abs() < f32::EPSILON {
return vec![1.0; scores.len()];
}
scores
.iter()
.map(|score| {
if score.is_finite() {
clamp_unit((score - min) / (max - min))
} else {
0.0
}
})
.collect()
}
pub fn fuse_scores(scores: &Scores, weights: FusionWeights) -> f32 {
let vector = scores.vector.unwrap_or(0.0);
let fts = scores.fts.unwrap_or(0.0);
let graph = scores.graph.unwrap_or(0.0);
let mut fused = graph.mul_add(
weights.graph,
vector.mul_add(weights.vector, fts * weights.fts),
);
let signals_present = scores
.vector
.iter()
.chain(scores.fts.iter())
.chain(scores.graph.iter())
.count();
if signals_present >= 2 {
fused += weights.multi_bonus;
}
clamp_unit(fused)
}
pub fn merge_scored_by_id<T>(
target: &mut std::collections::HashMap<String, Scored<T>>,
incoming: Vec<Scored<T>>,
) where
T: StoredObject + Clone,
{
for scored in incoming {
let id = scored.item.get_id().to_owned();
target
.entry(id)
.and_modify(|existing| {
if let Some(score) = scored.scores.vector {
existing.scores.vector = Some(score);
}
if let Some(score) = scored.scores.fts {
existing.scores.fts = Some(score);
}
if let Some(score) = scored.scores.graph {
existing.scores.graph = Some(score);
}
})
.or_insert_with(|| Scored {
item: scored.item.clone(),
scores: scored.scores,
fused: scored.fused,
});
}
}
pub fn sort_by_fused_desc<T>(items: &mut [Scored<T>])
where
T: StoredObject,
{
items.sort_by(|a, b| {
b.fused
.partial_cmp(&a.fused)
.unwrap_or(Ordering::Equal)
.then_with(|| a.item.get_id().cmp(b.item.get_id()))
});
}
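A small worked example of the fusion above, using hypothetical subscores and the default weights: an item seen by both vector search and full-text search fuses to 0.5 * 0.8 + 0.3 * 0.6 = 0.58, plus the 0.02 multi-signal bonus, giving 0.60.
// Sketch only: fabricated scores run through the helpers defined above.
fn fusion_example() {
    let scores = Scores {
        vector: Some(0.8), // from vector similarity
        fts: Some(0.6),    // from full-text search
        graph: None,       // no graph signal for this item
    };
    // Two signals present, so the multi-signal bonus applies:
    // 0.5 * 0.8 + 0.3 * 0.6 + 0.02 = 0.60
    let fused = fuse_scores(&scores, FusionWeights::default());
    assert!((fused - 0.60).abs() < 1e-4);
}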


@@ -1,4 +1,15 @@
use common::{error::AppError, storage::db::SurrealDbClient, utils::embedding::generate_embedding};
use std::collections::HashMap;
use common::storage::types::file_info::deserialize_flexible_id;
use common::{
error::AppError,
storage::{db::SurrealDbClient, types::StoredObject},
utils::embedding::generate_embedding,
};
use serde::Deserialize;
use surrealdb::sql::Thing;
use crate::scoring::{clamp_unit, distance_to_similarity, Scored};
/// Compares vectors and retrieves a number of items from the specified table.
///
@@ -22,24 +33,125 @@ use common::{error::AppError, storage::db::SurrealDbClient, utils::embedding::ge
///
/// * `T` - The type to deserialize the query results into. Must implement `serde::Deserialize`.
pub async fn find_items_by_vector_similarity<T>(
take: u8,
take: usize,
input_text: &str,
db_client: &SurrealDbClient,
table: &str,
openai_client: &async_openai::Client<async_openai::config::OpenAIConfig>,
user_id: &str,
) -> Result<Vec<T>, AppError>
) -> Result<Vec<Scored<T>>, AppError>
where
T: for<'de> serde::Deserialize<'de>,
T: for<'de> serde::Deserialize<'de> + StoredObject,
{
// Generate embeddings
let input_embedding = generate_embedding(openai_client, input_text, db_client).await?;
// Construct the query
let closest_query = format!("SELECT *, vector::distance::knn() AS distance FROM {} WHERE user_id = '{}' AND embedding <|{},40|> {:?} ORDER BY distance", table, user_id, take, input_embedding);
// Perform query and deserialize to struct
let closest_entities: Vec<T> = db_client.query(closest_query).await?.take(0)?;
Ok(closest_entities)
find_items_by_vector_similarity_with_embedding(take, input_embedding, db_client, table, user_id)
.await
}
#[derive(Debug, Deserialize)]
struct DistanceRow {
#[serde(deserialize_with = "deserialize_flexible_id")]
id: String,
distance: Option<f32>,
}
pub async fn find_items_by_vector_similarity_with_embedding<T>(
take: usize,
query_embedding: Vec<f32>,
db_client: &SurrealDbClient,
table: &str,
user_id: &str,
) -> Result<Vec<Scored<T>>, AppError>
where
T: for<'de> serde::Deserialize<'de> + StoredObject,
{
let embedding_literal = serde_json::to_string(&query_embedding)
.map_err(|err| AppError::InternalError(format!("Failed to serialize embedding: {err}")))?;
let closest_query = format!(
"SELECT id, vector::distance::knn() AS distance \
FROM {table} \
WHERE user_id = $user_id AND embedding <|{take},40|> {embedding} \
LIMIT $limit",
table = table,
take = take,
embedding = embedding_literal
);
let mut response = db_client
.query(closest_query)
.bind(("user_id", user_id.to_owned()))
.bind(("limit", take as i64))
.await?;
let distance_rows: Vec<DistanceRow> = response.take(0)?;
if distance_rows.is_empty() {
return Ok(Vec::new());
}
let ids: Vec<String> = distance_rows.iter().map(|row| row.id.clone()).collect();
let thing_ids: Vec<Thing> = ids
.iter()
.map(|id| Thing::from((table, id.as_str())))
.collect();
let mut items_response = db_client
.query("SELECT * FROM type::table($table) WHERE id IN $things AND user_id = $user_id")
.bind(("table", table.to_owned()))
.bind(("things", thing_ids.clone()))
.bind(("user_id", user_id.to_owned()))
.await?;
let items: Vec<T> = items_response.take(0)?;
let mut item_map: HashMap<String, T> = items
.into_iter()
.map(|item| (item.get_id().to_owned(), item))
.collect();
let mut min_distance = f32::MAX;
let mut max_distance = f32::MIN;
for row in &distance_rows {
if let Some(distance) = row.distance {
if distance.is_finite() {
if distance < min_distance {
min_distance = distance;
}
if distance > max_distance {
max_distance = distance;
}
}
}
}
let normalize = min_distance.is_finite()
&& max_distance.is_finite()
&& (max_distance - min_distance).abs() > f32::EPSILON;
let mut scored = Vec::with_capacity(distance_rows.len());
for row in distance_rows {
if let Some(item) = item_map.remove(&row.id) {
let similarity = row
.distance
.map(|distance| {
if normalize {
let span = max_distance - min_distance;
if span.abs() < f32::EPSILON {
1.0
} else {
let normalized = 1.0 - ((distance - min_distance) / span);
clamp_unit(normalized)
}
} else {
distance_to_similarity(distance)
}
})
.unwrap_or_default();
scored.push(Scored::new(item).with_vector_score(similarity));
}
}
Ok(scored)
}
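To illustrate the scaling above with made-up distances: when several rows come back, their distances are min-max normalized and inverted so the nearest hit scores 1.0 and the farthest 0.0; with a single row (or identical distances) the reciprocal fallback 1 / (1 + d) is used instead. The snippet relies only on the scoring helpers imported at the top of this file.
// Sketch only: the same arithmetic the function applies, on fabricated distances.
fn distance_scaling_example() {
    let (min, max) = (0.0_f32, 1.0_f32);
    let sims: Vec<f32> = [0.0_f32, 0.5, 1.0]
        .iter()
        .map(|d| clamp_unit(1.0 - ((d - min) / (max - min))))
        .collect();
    assert_eq!(sims, vec![1.0, 0.5, 0.0]);
    // Fallback used when normalization is not meaningful (e.g. a single row):
    assert!((distance_to_similarity(0.25) - 0.8).abs() < 1e-4);
}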


@@ -3,10 +3,10 @@
"devenv": {
"locked": {
"dir": "src/modules",
"lastModified": 1746681099,
"lastModified": 1761839147,
"owner": "cachix",
"repo": "devenv",
"rev": "a7f2ea275621391209fd702f5ddced32dd56a4e2",
"rev": "bb7849648b68035f6b910120252c22b28195cf54",
"type": "github"
},
"original": {
@@ -16,13 +16,31 @@
"type": "github"
}
},
"fenix": {
"inputs": {
"nixpkgs": "nixpkgs",
"rust-analyzer-src": "rust-analyzer-src"
},
"locked": {
"lastModified": 1761893049,
"owner": "nix-community",
"repo": "fenix",
"rev": "c2ac9a5c0d6d16630c3b225b874bd14528d1abe6",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "fenix",
"type": "github"
}
},
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1733328505,
"lastModified": 1761588595,
"owner": "edolstra",
"repo": "flake-compat",
"rev": "ff81ac966bb2cae68946d5ed5fc4994f96d0ffec",
"rev": "f387cd2afec9419c8ee37694406ca490c3f34ee5",
"type": "github"
},
"original": {
@@ -40,10 +58,10 @@
]
},
"locked": {
"lastModified": 1746537231,
"lastModified": 1760663237,
"owner": "cachix",
"repo": "git-hooks.nix",
"rev": "fa466640195d38ec97cf0493d6d6882bc4d14969",
"rev": "ca5b894d3e3e151ffc1db040b6ce4dcc75d31c37",
"type": "github"
},
"original": {
@@ -74,10 +92,25 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1746576598,
"lastModified": 1761672384,
"owner": "nixos",
"repo": "nixpkgs",
"rev": "b3582c75c7f21ce0b429898980eddbbf05c68e55",
"rev": "08dacfca559e1d7da38f3cf05f1f45ee9bfd213c",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1761880412,
"owner": "nixos",
"repo": "nixpkgs",
"rev": "a7fc11be66bdfb5cdde611ee5ce381c183da8386",
"type": "github"
},
"original": {
@@ -90,11 +123,48 @@
"root": {
"inputs": {
"devenv": "devenv",
"fenix": "fenix",
"git-hooks": "git-hooks",
"nixpkgs": "nixpkgs",
"nixpkgs": "nixpkgs_2",
"pre-commit-hooks": [
"git-hooks"
],
"rust-overlay": "rust-overlay"
}
},
"rust-analyzer-src": {
"flake": false,
"locked": {
"lastModified": 1761849405,
"owner": "rust-lang",
"repo": "rust-analyzer",
"rev": "f7de8ae045a5fe80f1203c5a1c3015b05f7c3550",
"type": "github"
},
"original": {
"owner": "rust-lang",
"ref": "nightly",
"repo": "rust-analyzer",
"type": "github"
}
},
"rust-overlay": {
"inputs": {
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1761878277,
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "6604534e44090c917db714faa58d47861657690c",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
}
},


@@ -11,14 +11,24 @@
pkgs.openssl
pkgs.nodejs
pkgs.vscode-langservers-extracted
pkgs.cargo-dist
pkgs.cargo-xwin
pkgs.clang
pkgs.onnxruntime
];
languages.rust = {
enable = true;
components = ["rustc" "clippy" "rustfmt" "cargo" "rust-analyzer"];
channel = "nightly";
targets = ["x86_64-unknown-linux-gnu" "x86_64-pc-windows-msvc"];
mold.enable = true;
};
env = {
ORT_DYLIB_PATH = "${pkgs.onnxruntime}/lib/libonnxruntime.so";
};
processes = {
surreal_db.exec = "docker run --rm --pull always -p 8000:8000 --net=host --user $(id -u) -v $(pwd)/database:/database surrealdb/surrealdb:latest-dev start rocksdb:/database/database.db --user root_user --pass root_password";
};


@@ -1,15 +1,11 @@
# yaml-language-server: $schema=https://devenv.sh/devenv.schema.json
inputs:
fenix:
url: github:nix-community/fenix
nixpkgs:
url: github:nixos/nixpkgs/nixpkgs-unstable
# If you're using non-OSS software, you can set allowUnfree to true.
rust-overlay:
url: github:oxalica/rust-overlay
inputs:
nixpkgs:
follows: nixpkgs
allowUnfree: true
# If you're willing to use a package that's vulnerable
# permittedInsecurePackages:
# - "openssl-1.1.1w"
# If you have more than one devenv you can merge them
#imports:
# - ./backend


@@ -4,9 +4,11 @@ members = ["cargo:."]
# Config for 'dist'
[dist]
# The preferred dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.28.0"
cargo-dist-version = "0.30.0"
# CI backends to support
ci = "github"
# Extra static files to include in each App (path relative to this Cargo.toml's dir)
include = ["lib"]
# The installers to generate for each app
installers = []
# Target platforms to build apps for (Rust target-triple syntax)


@@ -1,5 +1,3 @@
version: '3.8'
services:
minne:
build: .
@@ -12,10 +10,11 @@ services:
SURREALDB_PASSWORD: "root_password"
SURREALDB_DATABASE: "test"
SURREALDB_NAMESPACE: "test"
OPENAI_API_KEY: "sk-key"
OPENAI_API_KEY: "sk-add-your-key"
DATA_DIR: "./data"
HTTP_PORT: 3000
RUST_LOG: "info"
RERANKING_ENABLED: false ## Change to true to enable reranking
depends_on:
- surrealdb
networks:
@@ -31,7 +30,7 @@ services:
- ./database:/database # Mounts a 'database' folder from your project directory
command: >
start
--log debug
--log info
--user root_user
--pass root_password
rocksdb:./database/database.db

flake.lock (generated), 22 changed lines

@@ -1,5 +1,20 @@
{
"nodes": {
"crane": {
"locked": {
"lastModified": 1760924934,
"narHash": "sha256-tuuqY5aU7cUkR71sO2TraVKK2boYrdW3gCSXUkF4i44=",
"owner": "ipetkov",
"repo": "crane",
"rev": "c6b4d5308293d0d04fcfeee92705017537cad02f",
"type": "github"
},
"original": {
"owner": "ipetkov",
"repo": "crane",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems"
@@ -20,11 +35,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1746232882,
"narHash": "sha256-MHmBH2rS8KkRRdoU/feC/dKbdlMkcNkB5mwkuipVHeQ=",
"lastModified": 1761672384,
"narHash": "sha256-o9KF3DJL7g7iYMZq9SWgfS1BFlNbsm6xplRjVlOCkXI=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "7a2622e2c0dbad5c4493cb268aba12896e28b008",
"rev": "08dacfca559e1d7da38f3cf05f1f45ee9bfd213c",
"type": "github"
},
"original": {
@@ -36,6 +51,7 @@
},
"root": {
"inputs": {
"crane": "crane",
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
}

flake.nix, 133 changed lines

@@ -4,77 +4,84 @@
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
flake-utils.url = "github:numtide/flake-utils";
crane.url = "github:ipetkov/crane";
};
outputs = {
self,
nixpkgs,
flake-utils,
crane,
}:
flake-utils.lib.eachDefaultSystem (
system: let
pkgs = nixpkgs.legacyPackages.${system};
# --- Minne Package Definition ---
minne-pkg = pkgs.rustPlatform.buildRustPackage {
pname = "minne";
version = "0.1.0";
src = self;
cargoLock = {
lockFile = ./Cargo.lock;
};
# Skip tests due to testing fs operations
doCheck = false;
nativeBuildInputs = [
pkgs.pkg-config
pkgs.rustfmt
pkgs.makeWrapper # For the postInstall hook
];
buildInputs = [
pkgs.openssl
pkgs.chromium # Runtime dependency for the browser
];
# Wrap the actual executables to provide CHROME at runtime
postInstall = let
chromium_executable = "${pkgs.chromium}/bin/chromium";
in ''
wrapProgram $out/bin/main \
--set CHROME "${chromium_executable}"
wrapProgram $out/bin/worker \
--set CHROME "${chromium_executable}"
'';
meta = with pkgs.lib; {
description = "Minne Application";
license = licenses.mit;
};
};
in {
packages = {
minne = minne-pkg;
default = self.packages.${system}.minne;
flake-utils.lib.eachDefaultSystem (system: let
pkgs = nixpkgs.legacyPackages.${system};
lib = pkgs.lib;
craneLib = crane.mkLib pkgs;
libExt =
if pkgs.stdenv.isDarwin
then "dylib"
else "so";
minne-pkg = craneLib.buildPackage {
src = lib.cleanSourceWith {
src = ./.;
filter = let
extraPaths = [
(toString ./Cargo.lock)
(toString ./common/migrations)
(toString ./common/schemas)
(toString ./html-router/templates)
(toString ./html-router/assets)
];
in
path: type: let
p = toString path;
in
craneLib.filterCargoSources path type
|| lib.any (x: lib.hasPrefix x p) extraPaths;
};
apps = {
main = flake-utils.lib.mkApp {
drv = minne-pkg;
name = "main";
};
worker = flake-utils.lib.mkApp {
drv = minne-pkg;
name = "worker";
};
server = flake-utils.lib.mkApp {
drv = minne-pkg;
name = "server";
};
default = self.apps.${system}.main;
pname = "minne";
version = "0.2.6";
doCheck = false;
nativeBuildInputs = [pkgs.pkg-config pkgs.rustfmt pkgs.makeWrapper];
buildInputs = [pkgs.openssl pkgs.chromium pkgs.onnxruntime];
postInstall = ''
wrapProgram $out/bin/main \
--set CHROME ${pkgs.chromium}/bin/chromium \
--set ORT_DYLIB_PATH ${pkgs.onnxruntime}/lib/libonnxruntime.${libExt}
for b in worker server; do
if [ -x "$out/bin/$b" ]; then
wrapProgram $out/bin/$b \
--set CHROME ${pkgs.chromium}/bin/chromium \
--set ORT_DYLIB_PATH ${pkgs.onnxruntime}/lib/libonnxruntime.${libExt}
fi
done
'';
};
in {
packages = {
minne-pkg = minne-pkg;
default = minne-pkg;
};
apps = {
main = flake-utils.lib.mkApp {
drv = minne-pkg;
name = "main";
};
}
);
worker = flake-utils.lib.mkApp {
drv = minne-pkg;
name = "worker";
};
server = flake-utils.lib.mkApp {
drv = minne-pkg;
name = "server";
};
default = flake-utils.lib.mkApp {
drv = minne-pkg;
name = "main";
};
};
});
}


@@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2021"
license = "AGPL-3.0-or-later"
[lints]
workspace = true
[dependencies]
tokio = { workspace = true }
serde = { workspace = true }
@@ -32,6 +35,7 @@ tower-serve-static = { workspace = true }
tokio-util = { workspace = true }
chrono = { workspace = true }
url = { workspace = true }
uuid = { workspace = true }
common = { path = "../common" }
composite-retrieval = { path = "../composite-retrieval" }


@@ -419,6 +419,10 @@
document.addEventListener('DOMContentLoaded', () => tryRender(document));
// HTMX partial swaps
document.body.addEventListener('knowledge-graph-refresh', () => {
tryRender(document);
});
document.body.addEventListener('htmx:afterSettle', (evt) => {
tryRender(evt && evt.target ? evt.target : document);
});

File diff suppressed because one or more lines are too long


@@ -1,6 +1,7 @@
use common::storage::db::SurrealDbClient;
use common::storage::{db::SurrealDbClient, store::StorageManager};
use common::utils::template_engine::{ProvidesTemplateEngine, TemplateEngine};
use common::{create_template_engine, storage::db::ProvidesDb, utils::config::AppConfig};
use composite_retrieval::reranking::RerankerPool;
use std::sync::Arc;
use tracing::debug;
@@ -13,14 +14,18 @@ pub struct HtmlState {
pub templates: Arc<TemplateEngine>,
pub session_store: Arc<SessionStoreType>,
pub config: AppConfig,
pub storage: StorageManager,
pub reranker_pool: Option<Arc<RerankerPool>>,
}
impl HtmlState {
pub fn new_with_resources(
pub async fn new_with_resources(
db: Arc<SurrealDbClient>,
openai_client: Arc<OpenAIClientType>,
session_store: Arc<SessionStoreType>,
storage: StorageManager,
config: AppConfig,
reranker_pool: Option<Arc<RerankerPool>>,
) -> Result<Self, Box<dyn std::error::Error>> {
let template_engine = create_template_engine!("templates");
debug!("Template engine created for html_router.");
@@ -31,6 +36,8 @@ impl HtmlState {
session_store,
templates: Arc::new(template_engine),
config,
storage,
reranker_pool,
})
}
}


@@ -36,6 +36,7 @@ where
.add_protected_routes(routes::content::router())
.add_protected_routes(routes::knowledge::router())
.add_protected_routes(routes::ingestion::router())
.add_protected_routes(routes::scratchpad::router())
.with_compression()
.build()
}


@@ -205,26 +205,26 @@ pub enum HtmlError {
impl From<AppError> for HtmlError {
fn from(err: AppError) -> Self {
HtmlError::AppError(err)
Self::AppError(err)
}
}
impl From<surrealdb::Error> for HtmlError {
fn from(err: surrealdb::Error) -> Self {
HtmlError::AppError(AppError::from(err))
Self::AppError(AppError::from(err))
}
}
impl From<minijinja::Error> for HtmlError {
fn from(err: minijinja::Error) -> Self {
HtmlError::TemplateError(err.to_string())
Self::TemplateError(err.to_string())
}
}
impl IntoResponse for HtmlError {
fn into_response(self) -> Response {
match self {
HtmlError::AppError(err) => match err {
Self::AppError(err) => match err {
AppError::NotFound(_) => TemplateResponse::not_found().into_response(),
AppError::Auth(_) => TemplateResponse::unauthorized().into_response(),
AppError::Validation(msg) => TemplateResponse::bad_request(&msg).into_response(),
@@ -233,7 +233,7 @@ impl IntoResponse for HtmlError {
TemplateResponse::server_error().into_response()
}
},
HtmlError::TemplateError(err) => {
Self::TemplateError(err) => {
error!("Template error: {}", err);
TemplateResponse::server_error().into_response()
}


@@ -118,7 +118,7 @@ where
}
/// Enables response compression when building the router.
pub fn with_compression(mut self) -> Self {
pub const fn with_compression(mut self) -> Self {
self.compression_enabled = true;
self
}


@@ -24,7 +24,10 @@ pub async fn show_account_page(
RequireUser(user): RequireUser,
State(state): State<HtmlState>,
) -> Result<impl IntoResponse, HtmlError> {
let timezones = TZ_VARIANTS.iter().map(|tz| tz.to_string()).collect();
let timezones = TZ_VARIANTS
.iter()
.map(std::string::ToString::to_string)
.collect();
let conversation_archive = User::get_user_conversations(&user.id, &state.db).await?;
Ok(TemplateResponse::new_template(
@@ -102,7 +105,10 @@ pub async fn update_timezone(
..user.clone()
};
let timezones = TZ_VARIANTS.iter().map(|tz| tz.to_string()).collect();
let timezones = TZ_VARIANTS
.iter()
.map(std::string::ToString::to_string)
.collect();
// Render the API key section block
Ok(TemplateResponse::new_partial(


@@ -1,5 +1,9 @@
use async_openai::types::ListModelResponse;
use axum::{extract::State, response::IntoResponse, Form};
use axum::{
extract::{Query, State},
response::IntoResponse,
Form,
};
use serde::{Deserialize, Serialize};
use common::{
@@ -31,44 +35,83 @@ use crate::{
pub struct AdminPanelData {
user: User,
settings: SystemSettings,
analytics: Analytics,
users: i64,
analytics: Option<Analytics>,
users: Option<i64>,
default_query_prompt: String,
default_image_prompt: String,
conversation_archive: Vec<Conversation>,
available_models: ListModelResponse,
available_models: Option<ListModelResponse>,
current_section: AdminSection,
}
#[derive(Debug, Clone, Copy, Serialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum AdminSection {
Overview,
Models,
}
impl Default for AdminSection {
fn default() -> Self {
Self::Overview
}
}
#[derive(Deserialize)]
pub struct AdminPanelQuery {
section: Option<String>,
}
pub async fn show_admin_panel(
State(state): State<HtmlState>,
RequireUser(user): RequireUser,
Query(query): Query<AdminPanelQuery>,
) -> Result<impl IntoResponse, HtmlError> {
let (
settings_res,
analytics_res,
user_count_res,
conversation_archive_res,
available_models_res,
) = tokio::join!(
let section = match query.section.as_deref() {
Some("models") => AdminSection::Models,
_ => AdminSection::Overview,
};
let (settings, conversation_archive) = tokio::try_join!(
SystemSettings::get_current(&state.db),
Analytics::get_current(&state.db),
Analytics::get_users_amount(&state.db),
User::get_user_conversations(&user.id, &state.db),
async { state.openai_client.models().list().await }
);
User::get_user_conversations(&user.id, &state.db)
)?;
let (analytics, users) = if section == AdminSection::Overview {
let (analytics, users) = tokio::try_join!(
Analytics::get_current(&state.db),
Analytics::get_users_amount(&state.db)
)?;
(Some(analytics), Some(users))
} else {
(None, None)
};
let available_models = if section == AdminSection::Models {
Some(
state
.openai_client
.models()
.list()
.await
.map_err(|e| AppError::InternalError(e.to_string()))?,
)
} else {
None
};
Ok(TemplateResponse::new_template(
"admin/base.html",
AdminPanelData {
user,
settings: settings_res?,
analytics: analytics_res?,
available_models: available_models_res
.map_err(|e| AppError::InternalError(e.to_string()))?,
users: user_count_res?,
settings,
analytics,
available_models,
users,
default_query_prompt: DEFAULT_QUERY_SYSTEM_PROMPT.to_string(),
default_image_prompt: DEFAULT_IMAGE_PROCESSING_PROMPT.to_string(),
conversation_archive: conversation_archive_res?,
conversation_archive,
current_section: section,
},
))
}
@@ -103,7 +146,7 @@ pub async fn toggle_registration_status(
// Early return if the user is not admin
if !user.admin {
return Ok(TemplateResponse::redirect("/"));
};
}
let current_settings = SystemSettings::get_current(&state.db).await?;
@@ -115,7 +158,7 @@ pub async fn toggle_registration_status(
SystemSettings::update(&state.db, new_settings.clone()).await?;
Ok(TemplateResponse::new_partial(
"admin/base.html",
"admin/sections/overview.html",
"registration_status_input",
RegistrationToggleData {
settings: new_settings,
@@ -147,7 +190,7 @@ pub async fn update_model_settings(
// Early return if the user is not admin
if !user.admin {
return Ok(TemplateResponse::redirect("/"));
};
}
let current_settings = SystemSettings::get_current(&state.db).await?;
@@ -217,7 +260,7 @@ pub async fn update_model_settings(
.map_err(|_e| AppError::InternalError("Failed to get models".to_string()))?;
Ok(TemplateResponse::new_partial(
"admin/base.html",
"admin/sections/models.html",
"model_settings_form",
ModelSettingsData {
settings: new_settings,
@@ -239,7 +282,7 @@ pub async fn show_edit_system_prompt(
// Early return if the user is not admin
if !user.admin {
return Ok(TemplateResponse::redirect("/"));
};
}
let settings = SystemSettings::get_current(&state.db).await?;
@@ -270,7 +313,7 @@ pub async fn patch_query_prompt(
// Early return if the user is not admin
if !user.admin {
return Ok(TemplateResponse::redirect("/"));
};
}
let current_settings = SystemSettings::get_current(&state.db).await?;
@@ -282,7 +325,7 @@ pub async fn patch_query_prompt(
SystemSettings::update(&state.db, new_settings.clone()).await?;
Ok(TemplateResponse::new_partial(
"admin/base.html",
"admin/sections/overview.html",
"system_prompt_section",
SystemPromptSectionData {
settings: new_settings,
@@ -303,7 +346,7 @@ pub async fn show_edit_ingestion_prompt(
// Early return if the user is not admin
if !user.admin {
return Ok(TemplateResponse::redirect("/"));
};
}
let settings = SystemSettings::get_current(&state.db).await?;
@@ -329,7 +372,7 @@ pub async fn patch_ingestion_prompt(
// Early return if the user is not admin
if !user.admin {
return Ok(TemplateResponse::redirect("/"));
};
}
let current_settings = SystemSettings::get_current(&state.db).await?;
@@ -341,7 +384,7 @@ pub async fn patch_ingestion_prompt(
SystemSettings::update(&state.db, new_settings.clone()).await?;
Ok(TemplateResponse::new_partial(
"admin/base.html",
"admin/sections/overview.html",
"system_prompt_section",
SystemPromptSectionData {
settings: new_settings,
@@ -362,7 +405,7 @@ pub async fn show_edit_image_prompt(
// Early return if the user is not admin
if !user.admin {
return Ok(TemplateResponse::redirect("/"));
};
}
let settings = SystemSettings::get_current(&state.db).await?;
@@ -388,7 +431,7 @@ pub async fn patch_image_prompt(
// Early return if the user is not admin
if !user.admin {
return Ok(TemplateResponse::redirect("/"));
};
}
let current_settings = SystemSettings::get_current(&state.db).await?;
@@ -400,7 +443,7 @@ pub async fn patch_image_prompt(
SystemSettings::update(&state.db, new_settings.clone()).await?;
Ok(TemplateResponse::new_partial(
"admin/base.html",
"admin/sections/overview.html",
"system_prompt_section",
SystemPromptSectionData {
settings: new_settings,


@@ -27,13 +27,14 @@ pub async fn show_signin_form(
if auth.is_authenticated() {
return Ok(TemplateResponse::redirect("/"));
}
match boosted {
true => Ok(TemplateResponse::new_partial(
if boosted {
Ok(TemplateResponse::new_partial(
"auth/signin_base.html",
"body",
(),
)),
false => Ok(TemplateResponse::new_template("auth/signin_base.html", ())),
))
} else {
Ok(TemplateResponse::new_template("auth/signin_base.html", ()))
}
}


@@ -29,13 +29,14 @@ pub async fn show_signup_form(
return Ok(TemplateResponse::redirect("/"));
}
match boosted {
true => Ok(TemplateResponse::new_partial(
if boosted {
Ok(TemplateResponse::new_partial(
"auth/signup_form.html",
"body",
(),
)),
false => Ok(TemplateResponse::new_template("auth/signup_form.html", ())),
))
} else {
Ok(TemplateResponse::new_template("auth/signup_form.html", ()))
}
}
@@ -48,7 +49,7 @@ pub async fn process_signup_and_show_verification(
Ok(user) => user,
Err(e) => {
tracing::error!("{:?}", e);
return Ok(Html(format!("<p>{}</p>", e)).into_response());
return Ok(Html(format!("<p>{e}</p>")).into_response());
}
};


@@ -137,7 +137,7 @@ pub async fn show_existing_chat(
ChatPageData {
history: messages,
user,
conversation: Some(conversation.clone()),
conversation: Some(conversation),
conversation_archive,
},
))
@@ -157,7 +157,7 @@ pub async fn new_user_message(
if conversation.user_id != user.id {
return Ok(TemplateResponse::unauthorized().into_response());
};
}
let user_message = Message::new(conversation_id, MessageRole::User, form.content, None);


@@ -9,11 +9,8 @@ use axum::{
},
};
use composite_retrieval::{
answer_retrieval::{
create_chat_request, create_user_message_with_history, format_entities_json,
LLMResponseFormat,
},
retrieve_entities,
answer_retrieval::{create_chat_request, create_user_message_with_history, LLMResponseFormat},
retrieve_entities, retrieved_entities_to_json,
};
use futures::{
stream::{self, once},
@@ -121,11 +118,17 @@ pub async fn get_response_stream(
};
// 2. Retrieve knowledge entities
let rerank_lease = match state.reranker_pool.as_ref() {
Some(pool) => Some(pool.checkout().await),
None => None,
};
let entities = match retrieve_entities(
&state.db,
&state.openai_client,
&user_message.content,
&user.id,
rerank_lease,
)
.await
{
@@ -136,7 +139,7 @@ pub async fn get_response_stream(
};
// 3. Create the OpenAI request
let entities_json = format_entities_json(&entities);
let entities_json = retrieved_entities_to_json(&entities);
let formatted_user_message =
create_user_message_with_history(&entities_json, &history, &user_message.content);
let settings = match SystemSettings::get_current(&state.db).await {
@@ -251,7 +254,7 @@ pub async fn get_response_stream(
Err(e) => {
yield Ok(Event::default()
.event("error")
.data(format!("Stream error: {}", e)));
.data(format!("Stream error: {e}")));
}
}
}
@@ -260,7 +263,11 @@ pub async fn get_response_stream(
.chain(stream::once(async move {
if let Some(message) = rx_final.recv().await {
// Don't send any event if references is empty
if message.references.as_ref().is_some_and(|x| x.is_empty()) {
if message
.references
.as_ref()
.is_some_and(std::vec::Vec::is_empty)
{
return Ok(Event::default().event("empty")); // This event won't be sent
}


@@ -190,7 +190,7 @@ pub async fn delete_text_content(
TextContent::has_other_with_file(&file_info.id, &text_content.id, &state.db).await?;
if !file_in_use {
FileInfo::delete_by_id(&file_info.id, &state.db, &state.config).await?;
FileInfo::delete_by_id_with_storage(&file_info.id, &state.db, &state.storage).await?;
}
}


@@ -4,6 +4,7 @@ use axum::{
http::{header, HeaderMap, HeaderValue, StatusCode},
response::IntoResponse,
};
use chrono::{DateTime, Utc};
use futures::try_join;
use serde::Serialize;
@@ -16,7 +17,6 @@ use crate::{
utils::text_content_preview::truncate_text_contents,
AuthSessionType,
};
use common::storage::store;
use common::storage::types::user::DashboardStats;
use common::{
error::AppError,
@@ -85,7 +85,7 @@ pub async fn delete_text_content(
TextContent::has_other_with_file(&file_info.id, &text_content.id, &state.db).await?;
if !file_in_use {
FileInfo::delete_by_id(&file_info.id, &state.db, &state.config).await?;
FileInfo::delete_by_id_with_storage(&file_info.id, &state.db, &state.storage).await?;
}
}
@@ -106,7 +106,7 @@ pub async fn delete_text_content(
"dashboard/recent_content.html",
"latest_content_section",
LatestTextContentData {
user: user.to_owned(),
user: user.clone(),
text_contents,
},
))
@@ -139,6 +139,32 @@ pub struct ActiveJobsData {
pub user: User,
}
#[derive(Serialize)]
struct TaskArchiveEntry {
id: String,
state_label: String,
state_raw: String,
attempts: u32,
max_attempts: u32,
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
scheduled_at: DateTime<Utc>,
locked_at: Option<DateTime<Utc>>,
last_error_at: Option<DateTime<Utc>>,
error_message: Option<String>,
worker_id: Option<String>,
priority: i32,
lease_duration_secs: i64,
content_kind: String,
content_summary: String,
}
#[derive(Serialize)]
struct TaskArchiveData {
user: User,
tasks: Vec<TaskArchiveEntry>,
}
pub async fn delete_job(
State(state): State<HtmlState>,
RequireUser(user): RequireUser,
@@ -173,6 +199,70 @@ pub async fn show_active_jobs(
))
}
pub async fn show_task_archive(
State(state): State<HtmlState>,
RequireUser(user): RequireUser,
) -> Result<impl IntoResponse, HtmlError> {
let tasks = User::get_all_ingestion_tasks(&user.id, &state.db).await?;
let entries: Vec<TaskArchiveEntry> = tasks
.into_iter()
.map(|task| {
let (content_kind, content_summary) = summarize_task_content(&task);
TaskArchiveEntry {
id: task.id.clone(),
state_label: task.state.display_label().to_string(),
state_raw: task.state.as_str().to_string(),
attempts: task.attempts,
max_attempts: task.max_attempts,
created_at: task.created_at,
updated_at: task.updated_at,
scheduled_at: task.scheduled_at,
locked_at: task.locked_at,
last_error_at: task.last_error_at,
error_message: task.error_message.clone(),
worker_id: task.worker_id.clone(),
priority: task.priority,
lease_duration_secs: task.lease_duration_secs,
content_kind,
content_summary,
}
})
.collect();
Ok(TemplateResponse::new_template(
"dashboard/task_archive_modal.html",
TaskArchiveData {
user,
tasks: entries,
},
))
}
fn summarize_task_content(task: &IngestionTask) -> (String, String) {
match &task.content {
common::storage::types::ingestion_payload::IngestionPayload::Text { text, .. } => {
("Text".to_string(), truncate_summary(text, 80))
}
common::storage::types::ingestion_payload::IngestionPayload::Url { url, .. } => {
("URL".to_string(), url.to_string())
}
common::storage::types::ingestion_payload::IngestionPayload::File { file_info, .. } => {
("File".to_string(), file_info.file_name.clone())
}
}
}
fn truncate_summary(input: &str, max_chars: usize) -> String {
if input.chars().count() <= max_chars {
input.to_string()
} else {
let truncated: String = input.chars().take(max_chars).collect();
format!("{truncated}")
}
}
pub async fn serve_file(
State(state): State<HtmlState>,
RequireUser(user): RequireUser,
@@ -187,7 +277,7 @@ pub async fn serve_file(
return Ok(TemplateResponse::unauthorized().into_response());
}
let stream = match store::get_stream_at(&file_info.path, &state.config).await {
let stream = match state.storage.get_stream(&file_info.path).await {
Ok(s) => s,
Err(_) => return Ok(TemplateResponse::server_error().into_response()),
};


@@ -5,7 +5,9 @@ use axum::{
routing::{delete, get},
Router,
};
use handlers::{delete_job, delete_text_content, index_handler, serve_file, show_active_jobs};
use handlers::{
delete_job, delete_text_content, index_handler, serve_file, show_active_jobs, show_task_archive,
};
use crate::html_state::HtmlState;
@@ -24,6 +26,7 @@ where
{
Router::new()
.route("/jobs/{job_id}", delete(delete_job))
.route("/jobs/archive", get(show_task_archive))
.route("/active-jobs", get(show_active_jobs))
.route("/text-content/{id}", delete(delete_text_content))
.route("/file/{id}", get(serve_file))


@@ -20,7 +20,7 @@ use common::{
storage::types::{
file_info::FileInfo,
ingestion_payload::IngestionPayload,
ingestion_task::{IngestionTask, IngestionTaskStatus},
ingestion_task::{IngestionTask, TaskState},
user::User,
},
};
@@ -98,7 +98,8 @@ pub async fn process_ingress_form(
info!("{:?}", input);
let file_infos = try_join_all(input.files.into_iter().map(|file| {
FileInfo::new(file, &state.db, &user.id, &state.config).map_err(AppError::from)
FileInfo::new_with_storage(file, &state.db, &user.id, &state.storage)
.map_err(AppError::from)
}))
.await?;
@@ -150,13 +151,8 @@ pub async fn get_task_updates_stream(
let db = state.db.clone();
// 1. Check for authenticated user
let current_user = match auth.current_user {
Some(user) => user,
None => {
return Sse::new(create_error_stream(
"User not authenticated. Please log in.",
));
}
let Some(current_user) = auth.current_user else {
return Sse::new(create_error_stream("User not authenticated"));
};
// 2. Fetch task for initial authorization and to ensure it exists
@@ -178,40 +174,54 @@ pub async fn get_task_updates_stream(
Ok(Some(updated_task)) => {
consecutive_db_errors = 0; // Reset error count on success
// Format the status message based on the task state
let status_message = match &updated_task.status {
IngestionTaskStatus::Created => "Created".to_string(),
IngestionTaskStatus::InProgress { attempts, .. } => {
// Following your template's current display
format!("In progress, attempt {}", attempts)
let status_message = match updated_task.state {
TaskState::Pending => "Pending".to_string(),
TaskState::Reserved => format!(
"Reserved (attempt {} of {})",
updated_task.attempts,
updated_task.max_attempts
),
TaskState::Processing => format!(
"Processing (attempt {} of {})",
updated_task.attempts,
updated_task.max_attempts
),
TaskState::Succeeded => "Completed".to_string(),
TaskState::Failed => {
let mut base = format!(
"Retry scheduled (attempt {} of {})",
updated_task.attempts,
updated_task.max_attempts
);
if let Some(message) = updated_task.error_message.as_ref() {
base.push_str(": ");
base.push_str(message);
}
base
}
IngestionTaskStatus::Completed => "Completed".to_string(),
IngestionTaskStatus::Error { message } => {
// Providing a user-friendly error message from the status
format!("Error: {}", message)
TaskState::Cancelled => "Cancelled".to_string(),
TaskState::DeadLetter => {
let mut base = "Failed permanently".to_string();
if let Some(message) = updated_task.error_message.as_ref() {
base.push_str(": ");
base.push_str(message);
}
base
}
IngestionTaskStatus::Cancelled => "Cancelled".to_string(),
};
yield Ok(Event::default().event("status").data(status_message));
// Check for terminal states to close the stream
match updated_task.status {
IngestionTaskStatus::Completed
| IngestionTaskStatus::Error { .. }
| IngestionTaskStatus::Cancelled => {
// Send a specific event that HTMX uses to close the connection
// Send an event to reload the recent content
// Send an event to remove the loading indicator
let check_icon = state.templates.render("icons/check_icon.html", &context!{}).unwrap_or("Ok".to_string());
yield Ok(Event::default().event("stop_loading").data(check_icon));
yield Ok(Event::default().event("update_latest_content").data("Update latest content"));
yield Ok(Event::default().event("close_stream").data("Stream complete"));
break; // Exit loop on terminal states
}
_ => {
// Not a terminal state, continue polling
}
if updated_task.state.is_terminal() {
// Send a specific event that HTMX uses to close the connection
// Send an event to reload the recent content
// Send an event to remove the loading indicator
let check_icon = state.templates.render("icons/check_icon.html", &context!{}).unwrap_or_else(|_| "Ok".to_string());
yield Ok(Event::default().event("stop_loading").data(check_icon));
yield Ok(Event::default().event("update_latest_content").data("Update latest content"));
yield Ok(Event::default().event("close_stream").data("Stream complete"));
break; // Exit loop on terminal states
}
},
Ok(None) => {
@@ -222,10 +232,10 @@ pub async fn get_task_updates_stream(
Err(db_err) => {
error!("Database error while fetching task '{}': {:?}", task_id, db_err);
consecutive_db_errors += 1;
yield Ok(Event::default().event("error").data(format!("Temporary error fetching task update (attempt {}).", consecutive_db_errors)));
yield Ok(Event::default().event("error").data(format!("Temporary error fetching task update (attempt {consecutive_db_errors}).")));
if consecutive_db_errors >= max_consecutive_db_errors {
error!("Max consecutive DB errors reached for task '{}'. Closing stream.", task_id);
error!("Max consecutive DB errors reached for task '{task_id}'. Closing stream.");
yield Ok(Event::default().event("error").data("Persistent error fetching task updates. Stream closed."));
yield Ok(Event::default().event("close_stream").data("Stream complete"));
break;
@@ -243,14 +253,10 @@ pub async fn get_task_updates_stream(
)
}
Ok(None) => Sse::new(create_error_stream(format!(
"Task with ID '{}' not found.",
task_id
"Task with ID '{task_id}' not found."
))),
Err(e) => {
error!(
"Failed to fetch task '{}' for authorization: {:?}",
task_id, e
);
error!("Failed to fetch task '{task_id}' for authorization: {e:?}");
Sse::new(create_error_stream(
"An error occurred while retrieving task details. Please try again later.",
))


@@ -1,20 +1,33 @@
use std::cmp::Ordering;
use std::collections::{HashMap, HashSet};
use std::fmt;
use axum::{
extract::{Path, Query, State},
response::IntoResponse,
http::HeaderValue,
response::{IntoResponse, Response},
Form, Json,
};
use axum_htmx::{HxBoosted, HxRequest};
use serde::{Deserialize, Serialize};
use common::storage::types::{
conversation::Conversation,
knowledge_entity::{KnowledgeEntity, KnowledgeEntityType},
knowledge_relationship::KnowledgeRelationship,
user::User,
use axum_htmx::{HxBoosted, HxRequest, HX_TRIGGER};
use serde::{
de::{self, Deserializer, MapAccess, Visitor},
Deserialize, Serialize,
};
use common::{
error::AppError,
storage::types::{
conversation::Conversation,
knowledge_entity::{KnowledgeEntity, KnowledgeEntityType},
knowledge_relationship::KnowledgeRelationship,
user::User,
},
utils::embedding::generate_embedding,
};
use composite_retrieval::{retrieve_entities, RetrievedEntity};
use tracing::debug;
use uuid::Uuid;
use crate::{
html_state::HtmlState,
middlewares::{
@@ -26,6 +39,95 @@ use crate::{
use url::form_urlencoded;
const KNOWLEDGE_ENTITIES_PER_PAGE: usize = 12;
const RELATIONSHIP_TYPE_OPTIONS: &[&str] = &["RelatedTo", "RelevantTo", "SimilarTo", "References"];
const DEFAULT_RELATIONSHIP_TYPE: &str = RELATIONSHIP_TYPE_OPTIONS[0];
const MAX_RELATIONSHIP_SUGGESTIONS: usize = 10;
const SUGGESTION_MIN_SCORE: f32 = 0.5;
const GRAPH_REFRESH_TRIGGER: &str = r#"{"knowledge-graph-refresh":true}"#;
const RELATIONSHIP_TYPE_ALIASES: &[(&str, &str)] = &[("relatesto", "RelatedTo")];
fn relationship_type_or_default(value: Option<&str>) -> String {
match value {
Some(raw) => canonicalize_relationship_type(raw),
None => DEFAULT_RELATIONSHIP_TYPE.to_string(),
}
}
fn canonicalize_relationship_type(value: &str) -> String {
let trimmed = value.trim();
if trimmed.is_empty() {
return DEFAULT_RELATIONSHIP_TYPE.to_string();
}
let key: String = trimmed
.chars()
.filter(|c| c.is_ascii_alphanumeric())
.flat_map(|c| c.to_lowercase())
.collect();
for option in RELATIONSHIP_TYPE_OPTIONS {
let option_key: String = option
.chars()
.filter(|c| c.is_ascii_alphanumeric())
.flat_map(|c| c.to_lowercase())
.collect();
if option_key == key {
return (*option).to_string();
}
}
for (alias, target) in RELATIONSHIP_TYPE_ALIASES {
if *alias == key {
return (*target).to_string();
}
}
let mut result = String::new();
for segment in trimmed
.split(|c: char| !c.is_ascii_alphanumeric())
.filter(|segment| !segment.is_empty())
{
let mut chars = segment.chars();
if let Some(first) = chars.next() {
result.extend(first.to_uppercase());
for ch in chars {
result.extend(ch.to_lowercase());
}
}
}
if result.is_empty() {
trimmed.to_string()
} else {
result
}
}
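A few hypothetical spot checks of the canonicalization above: known options match case- and separator-insensitively, the alias table catches near-misses, and anything else is re-cased segment by segment.
// Sketch only, not part of the handler code.
fn canonicalize_examples() {
    assert_eq!(canonicalize_relationship_type("related to"), "RelatedTo");
    assert_eq!(canonicalize_relationship_type("  similar_to "), "SimilarTo");
    assert_eq!(canonicalize_relationship_type("relatesTo"), "RelatedTo"); // via the alias table
    assert_eq!(canonicalize_relationship_type("depends on"), "DependsOn"); // unknown value, PascalCased
    assert_eq!(canonicalize_relationship_type("   "), DEFAULT_RELATIONSHIP_TYPE);
}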
fn collect_relationship_type_options(relationships: &[KnowledgeRelationship]) -> Vec<String> {
let mut options: HashSet<String> = RELATIONSHIP_TYPE_OPTIONS
.iter()
.map(|value| (*value).to_string())
.collect();
for relationship in relationships {
options.insert(canonicalize_relationship_type(
&relationship.metadata.relationship_type,
));
}
let mut options: Vec<String> = options.into_iter().collect();
options.sort();
options
}
fn respond_with_graph_refresh(response: TemplateResponse) -> Response {
let mut response = response.into_response();
if let Ok(value) = HeaderValue::from_str(GRAPH_REFRESH_TRIGGER) {
response.headers_mut().insert(HX_TRIGGER, value);
}
response
}
#[derive(Deserialize, Default)]
pub struct FilterParams {
@@ -34,11 +136,200 @@ pub struct FilterParams {
page: Option<usize>,
}
pub async fn show_new_knowledge_entity_form(
State(state): State<HtmlState>,
RequireUser(user): RequireUser,
) -> Result<impl IntoResponse, HtmlError> {
let entity_types: Vec<String> = KnowledgeEntityType::variants()
.iter()
.map(|&s| s.to_owned())
.collect();
let existing_entities = User::get_knowledge_entities(&user.id, &state.db).await?;
let relationships = User::get_knowledge_relationships(&user.id, &state.db).await?;
let relationship_type_options = collect_relationship_type_options(&relationships);
let empty_selected: HashSet<String> = HashSet::new();
let empty_scores: HashMap<String, f32> = HashMap::new();
let relationship_options =
build_relationship_options(existing_entities, &empty_selected, &empty_scores);
Ok(TemplateResponse::new_template(
"knowledge/new_knowledge_entity_modal.html",
NewEntityModalData {
entity_types,
relationship_list: RelationshipListData {
relationship_options,
relationship_type: relationship_type_or_default(None),
suggestion_count: 0,
},
relationship_type_options,
},
))
}
pub async fn create_knowledge_entity(
State(state): State<HtmlState>,
RequireUser(user): RequireUser,
Form(form): Form<CreateKnowledgeEntityParams>,
) -> Result<impl IntoResponse, HtmlError> {
let name = form.name.trim().to_string();
if name.is_empty() {
return Err(AppError::Validation("Name is required".into()).into());
}
let description = form.description.trim().to_string();
let entity_type = KnowledgeEntityType::from(form.entity_type.trim().to_string());
let embedding_input =
format!("name: {name}, description: {description}, type: {entity_type:?}");
let embedding = generate_embedding(&state.openai_client, &embedding_input, &state.db).await?;
let source_id = format!("manual::{}", Uuid::new_v4());
let new_entity = KnowledgeEntity::new(
source_id,
name.clone(),
description.clone(),
entity_type,
None,
embedding,
user.id.clone(),
);
state.db.store_item(new_entity.clone()).await?;
let relationship_type = relationship_type_or_default(form.relationship_type.as_deref());
debug!("form: {:?}", form);
if !form.relationship_ids.is_empty() {
let existing_entities = User::get_knowledge_entities(&user.id, &state.db).await?;
let valid_ids: HashSet<String> = existing_entities
.into_iter()
.map(|entity| entity.id)
.collect();
let mut unique_ids: HashSet<String> = HashSet::new();
for target_id in form.relationship_ids {
if target_id == new_entity.id {
continue;
}
if !valid_ids.contains(&target_id) {
continue;
}
if !unique_ids.insert(target_id.clone()) {
continue;
}
let relationship = KnowledgeRelationship::new(
new_entity.id.clone(),
target_id,
user.id.clone(),
format!("manual::{}", new_entity.id),
relationship_type.clone(),
);
relationship.store_relationship(&state.db).await?;
}
}
let default_params = FilterParams::default();
let kb_data = build_knowledge_base_data(&state, &user, &default_params).await?;
Ok(respond_with_graph_refresh(TemplateResponse::new_partial(
"knowledge/base.html",
"main",
kb_data,
)))
}
pub async fn suggest_knowledge_relationships(
State(state): State<HtmlState>,
RequireUser(user): RequireUser,
Form(form): Form<SuggestRelationshipsParams>,
) -> Result<impl IntoResponse, HtmlError> {
let entity_lookup: HashMap<String, KnowledgeEntity> =
User::get_knowledge_entities(&user.id, &state.db)
.await?
.into_iter()
.map(|entity| (entity.id.clone(), entity))
.collect();
let mut selected_ids: HashSet<String> = form
.relationship_ids
.into_iter()
.filter(|id| entity_lookup.contains_key(id))
.collect();
let mut suggestion_scores: HashMap<String, f32> = HashMap::new();
let mut query_parts = Vec::new();
if let Some(name) = form
.name
.as_deref()
.map(str::trim)
.filter(|v| !v.is_empty())
{
query_parts.push(name.to_string());
}
if let Some(description) = form
.description
.as_deref()
.map(str::trim)
.filter(|v| !v.is_empty())
{
query_parts.push(description.to_string());
}
if !query_parts.is_empty() {
let query = query_parts.join(" ");
let rerank_lease = match state.reranker_pool.as_ref() {
Some(pool) => Some(pool.checkout().await),
None => None,
};
if let Ok(results) = retrieve_entities(
&state.db,
&state.openai_client,
&query,
&user.id,
rerank_lease,
)
.await
{
for RetrievedEntity { entity, score, .. } in results {
if suggestion_scores.len() >= MAX_RELATIONSHIP_SUGGESTIONS {
break;
}
if score.is_nan() || score < SUGGESTION_MIN_SCORE {
continue;
}
if !entity_lookup.contains_key(&entity.id) {
continue;
}
suggestion_scores.insert(entity.id.clone(), score);
selected_ids.insert(entity.id.clone());
}
}
}
let relationship_type = relationship_type_or_default(form.relationship_type.as_deref());
let entities: Vec<KnowledgeEntity> = entity_lookup.into_values().collect();
let relationship_options =
build_relationship_options(entities, &selected_ids, &suggestion_scores);
Ok(TemplateResponse::new_template(
"knowledge/relationship_selector.html",
RelationshipListData {
relationship_options,
relationship_type,
suggestion_count: suggestion_scores.len(),
},
))
}
#[derive(Serialize)]
pub struct KnowledgeBaseData {
entities: Vec<KnowledgeEntity>,
visible_entities: Vec<KnowledgeEntity>,
relationships: Vec<KnowledgeRelationship>,
relationships: Vec<RelationshipTableRow>,
user: User,
entity_types: Vec<String>,
content_categories: Vec<String>,
@@ -47,24 +338,105 @@ pub struct KnowledgeBaseData {
conversation_archive: Vec<Conversation>,
pagination: Pagination,
page_query: String,
relationship_type_options: Vec<String>,
default_relationship_type: String,
}
pub async fn show_knowledge_page(
State(state): State<HtmlState>,
RequireUser(user): RequireUser,
HxRequest(is_htmx): HxRequest,
HxBoosted(is_boosted): HxBoosted,
Query(mut params): Query<FilterParams>,
) -> Result<impl IntoResponse, HtmlError> {
// Normalize filters: treat empty or "none" as no filter
params.entity_type = normalize_filter(params.entity_type.take());
params.content_category = normalize_filter(params.content_category.take());
#[derive(Serialize)]
pub struct RelationshipOption {
entity: KnowledgeEntity,
is_selected: bool,
is_suggested: bool,
score: Option<f32>,
}
// Load relevant data
#[derive(Serialize)]
pub struct RelationshipTableRow {
relationship: KnowledgeRelationship,
relationship_type_label: String,
}
fn build_relationship_options(
entities: Vec<KnowledgeEntity>,
selected_ids: &HashSet<String>,
suggestion_scores: &HashMap<String, f32>,
) -> Vec<RelationshipOption> {
let mut options: Vec<RelationshipOption> = entities
.into_iter()
.map(|entity| {
let id = entity.id.clone();
let score = suggestion_scores.get(&id).copied();
RelationshipOption {
entity,
is_selected: selected_ids.contains(&id),
is_suggested: score.is_some(),
score,
}
})
.collect();
options.sort_by(|a, b| match (a.is_suggested, b.is_suggested) {
(true, false) => Ordering::Less,
(false, true) => Ordering::Greater,
_ => match (a.score, b.score) {
(Some(a_score), Some(b_score)) => {
b_score.partial_cmp(&a_score).unwrap_or(Ordering::Equal)
}
(Some(_), None) => Ordering::Less,
(None, Some(_)) => Ordering::Greater,
_ => a
.entity
.name
.to_lowercase()
.cmp(&b.entity.name.to_lowercase()),
},
});
options
}
fn build_relationship_table_data(
entities: Vec<KnowledgeEntity>,
relationships: Vec<KnowledgeRelationship>,
) -> RelationshipTableData {
let relationship_type_options = collect_relationship_type_options(&relationships);
let mut frequency: HashMap<String, usize> = HashMap::new();
let relationships = relationships
.into_iter()
.map(|relationship| {
let relationship_type_label =
canonicalize_relationship_type(&relationship.metadata.relationship_type);
*frequency
.entry(relationship_type_label.clone())
.or_insert(0) += 1;
RelationshipTableRow {
relationship,
relationship_type_label,
}
})
.collect();
let default_relationship_type = frequency
.into_iter()
.max_by_key(|(_, count)| *count)
.map(|(label, _)| label)
.unwrap_or_else(|| DEFAULT_RELATIONSHIP_TYPE.to_string());
RelationshipTableData {
entities,
relationships,
relationship_type_options,
default_relationship_type,
}
}
async fn build_knowledge_base_data(
state: &HtmlState,
user: &User,
params: &FilterParams,
) -> Result<KnowledgeBaseData, AppError> {
let entity_types = User::get_entity_types(&user.id, &state.db).await?;
let content_categories = User::get_user_categories(&user.id, &state.db).await?;
// Load entities based on filters
let entities = match &params.content_category {
Some(cat) => {
User::get_knowledge_entities_by_content_category(&user.id, cat, &state.db).await?
@@ -90,23 +462,29 @@ pub async fn show_knowledge_page(
if encoded.is_empty() {
String::new()
} else {
format!("&{}", encoded)
format!("&{encoded}")
}
};
let relationships = User::get_knowledge_relationships(&user.id, &state.db).await?;
let entity_id_set: HashSet<String> = entities.iter().map(|e| e.id.clone()).collect();
let relationships: Vec<KnowledgeRelationship> = relationships
let filtered_relationships: Vec<KnowledgeRelationship> = relationships
.into_iter()
.filter(|rel| entity_id_set.contains(&rel.in_) && entity_id_set.contains(&rel.out))
.collect();
let RelationshipTableData {
entities: _,
relationships,
relationship_type_options,
default_relationship_type,
} = build_relationship_table_data(entities.clone(), filtered_relationships);
let conversation_archive = User::get_user_conversations(&user.id, &state.db).await?;
let kb_data = KnowledgeBaseData {
Ok(KnowledgeBaseData {
entities,
visible_entities,
relationships,
user,
user: user.clone(),
entity_types,
content_categories,
selected_entity_type: params.entity_type.clone(),
@@ -114,7 +492,247 @@ pub async fn show_knowledge_page(
conversation_archive,
pagination,
page_query,
};
relationship_type_options,
default_relationship_type,
})
}
#[derive(Serialize)]
pub struct RelationshipListData {
relationship_options: Vec<RelationshipOption>,
relationship_type: String,
suggestion_count: usize,
}
#[derive(Serialize)]
pub struct NewEntityModalData {
entity_types: Vec<String>,
relationship_list: RelationshipListData,
relationship_type_options: Vec<String>,
}
#[derive(Debug)]
pub struct CreateKnowledgeEntityParams {
pub name: String,
pub entity_type: String,
pub description: String,
pub relationship_type: Option<String>,
pub relationship_ids: Vec<String>,
}
impl<'de> Deserialize<'de> for CreateKnowledgeEntityParams {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
#[derive(Deserialize)]
#[serde(field_identifier, rename_all = "snake_case")]
enum Field {
Name,
EntityType,
Description,
RelationshipType,
#[serde(alias = "relationship_ids[]")]
RelationshipIds,
}
struct ParamsVisitor;
impl<'de> Visitor<'de> for ParamsVisitor {
type Value = CreateKnowledgeEntityParams;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("struct CreateKnowledgeEntityParams")
}
fn visit_map<M>(self, mut map: M) -> Result<Self::Value, M::Error>
where
M: MapAccess<'de>,
{
let mut name: Option<String> = None;
let mut entity_type: Option<String> = None;
let mut description: Option<String> = None;
let mut relationship_type: Option<String> = None;
let mut relationship_ids: Vec<String> = Vec::new();
while let Some(key) = map.next_key::<Field>()? {
match key {
Field::Name => {
if name.is_some() {
return Err(de::Error::duplicate_field("name"));
}
name = Some(map.next_value()?);
}
Field::EntityType => {
if entity_type.is_some() {
return Err(de::Error::duplicate_field("entity_type"));
}
entity_type = Some(map.next_value()?);
}
Field::Description => {
description = Some(map.next_value()?);
}
Field::RelationshipType => {
relationship_type = Some(map.next_value()?);
}
Field::RelationshipIds => {
let value: String = map.next_value()?;
let trimmed = value.trim();
if !trimmed.is_empty() {
relationship_ids.push(trimmed.to_owned());
}
}
}
}
let name = name.ok_or_else(|| de::Error::missing_field("name"))?;
let entity_type =
entity_type.ok_or_else(|| de::Error::missing_field("entity_type"))?;
let description = description.unwrap_or_default();
let relationship_type = relationship_type
.map(|value: String| value.trim().to_owned())
.filter(|value| !value.is_empty());
Ok(CreateKnowledgeEntityParams {
name,
entity_type,
description,
relationship_type,
relationship_ids,
})
}
}
const FIELDS: &[&str] = &[
"name",
"entity_type",
"description",
"relationship_type",
"relationship_ids",
];
deserializer.deserialize_struct("CreateKnowledgeEntityParams", FIELDS, ParamsVisitor)
}
}
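For context, the custom visitor exists because the modal can submit `relationship_ids[]` several times; the sketch below shows a hypothetical form body and the value it is meant to produce (the field values and ids are illustrative, not taken from the diff).
// Sketch only. An urlencoded body along the lines of
//   name=Tokio&entity_type=Project&description=Async+runtime
//   &relationship_type=RelatedTo&relationship_ids[]=abc123&relationship_ids[]=def456
// is expected to deserialize roughly into:
fn create_params_example() -> CreateKnowledgeEntityParams {
    CreateKnowledgeEntityParams {
        name: "Tokio".to_string(),
        entity_type: "Project".to_string(),
        description: "Async runtime".to_string(),
        relationship_type: Some("RelatedTo".to_string()),
        relationship_ids: vec!["abc123".to_string(), "def456".to_string()],
    }
}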
#[derive(Debug)]
pub struct SuggestRelationshipsParams {
pub name: Option<String>,
pub description: Option<String>,
pub relationship_type: Option<String>,
pub relationship_ids: Vec<String>,
}
impl<'de> Deserialize<'de> for SuggestRelationshipsParams {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
#[derive(Deserialize)]
#[serde(field_identifier, rename_all = "snake_case")]
enum Field {
Name,
Description,
RelationshipType,
EntityType,
#[serde(alias = "relationship_ids[]")]
RelationshipIds,
}
struct ParamsVisitor;
impl<'de> Visitor<'de> for ParamsVisitor {
type Value = SuggestRelationshipsParams;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("struct SuggestRelationshipsParams")
}
fn visit_map<M>(self, mut map: M) -> Result<Self::Value, M::Error>
where
M: MapAccess<'de>,
{
let mut name: Option<String> = None;
let mut description: Option<String> = None;
let mut relationship_type: Option<String> = None;
let mut relationship_ids: Vec<String> = Vec::new();
while let Some(key) = map.next_key::<Field>()? {
match key {
Field::Name => {
if name.is_some() {
return Err(de::Error::duplicate_field("name"));
}
let value: String = map.next_value()?;
let trimmed = value.trim();
if !trimmed.is_empty() {
name = Some(trimmed.to_owned());
}
}
Field::Description => {
let value: String = map.next_value()?;
let trimmed = value.trim();
if trimmed.is_empty() {
description = None;
} else {
description = Some(trimmed.to_owned());
}
}
Field::RelationshipType => {
let value: String = map.next_value()?;
let trimmed = value.trim();
if trimmed.is_empty() {
relationship_type = None;
} else {
relationship_type = Some(trimmed.to_owned());
}
}
Field::EntityType => {
map.next_value::<de::IgnoredAny>()?;
}
Field::RelationshipIds => {
let value: String = map.next_value()?;
let trimmed = value.trim();
if !trimmed.is_empty() {
relationship_ids.push(trimmed.to_owned());
}
}
}
}
Ok(SuggestRelationshipsParams {
name,
description,
relationship_type,
relationship_ids,
})
}
}
const FIELDS: &[&str] = &[
"name",
"description",
"relationship_type",
"entity_type",
"relationship_ids",
];
deserializer.deserialize_struct("SuggestRelationshipsParams", FIELDS, ParamsVisitor)
}
}
pub async fn show_knowledge_page(
State(state): State<HtmlState>,
RequireUser(user): RequireUser,
HxRequest(is_htmx): HxRequest,
HxBoosted(is_boosted): HxBoosted,
Query(mut params): Query<FilterParams>,
) -> Result<impl IntoResponse, HtmlError> {
// Normalize filters: treat empty or "none" as no filter
params.entity_type = normalize_filter(params.entity_type.take());
params.content_category = normalize_filter(params.content_category.take());
let kb_data = build_knowledge_base_data(&state, &user, &params).await?;
// Determine response type:
// If it is an HTMX request but NOT a boosted navigation, send partial update (main block only)
@@ -182,7 +800,7 @@ pub async fn get_knowledge_graph_json(
let mut degree_count: HashMap<String, usize> = HashMap::new();
let mut links: Vec<GraphLink> = Vec::new();
for rel in relationships.iter() {
for rel in &relationships {
if entity_ids.contains(&rel.in_) && entity_ids.contains(&rel.out) {
// undirected counting for degree
*degree_count.entry(rel.in_.clone()).or_insert(0) += 1;
@@ -190,7 +808,7 @@ pub async fn get_knowledge_graph_json(
links.push(GraphLink {
source: rel.out.clone(),
target: rel.in_.clone(),
relationship_type: rel.metadata.relationship_type.clone(),
relationship_type: canonicalize_relationship_type(&rel.metadata.relationship_type),
});
}
}
@@ -209,17 +827,14 @@ pub async fn get_knowledge_graph_json(
}
// Normalize filter parameters: convert empty strings or "none" (case-insensitive) to None
fn normalize_filter(input: Option<String>) -> Option<String> {
match input {
None => None,
Some(s) => {
let trimmed = s.trim();
if trimmed.is_empty() || trimmed.eq_ignore_ascii_case("none") {
None
} else {
Some(trim_matching_quotes(trimmed).to_string())
}
input.and_then(|s| {
let trimmed = s.trim();
if trimmed.is_empty() || trimmed.eq_ignore_ascii_case("none") {
None
} else {
Some(trim_matching_quotes(trimmed).to_string())
}
}
})
}
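// Examples (assuming trim_matching_quotes strips one pair of surrounding quotes):
//   normalize_filter(Some("   ".into()))          -> None
//   normalize_filter(Some("None".into()))         -> None
//   normalize_filter(Some("\"Person\"".into()))   -> Some("Person".to_string())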
fn trim_matching_quotes(value: &str) -> &str {
@@ -249,7 +864,7 @@ pub async fn show_edit_knowledge_entity_form(
// Get entity types
let entity_types: Vec<String> = KnowledgeEntityType::variants()
.iter()
.map(|s| s.to_string())
.map(|&s| s.to_owned())
.collect();
// Get the entity and validate ownership
@@ -259,8 +874,8 @@ pub async fn show_edit_knowledge_entity_form(
"knowledge/edit_knowledge_entity_modal.html",
EntityData {
entity,
user,
entity_types,
user,
},
))
}
@@ -320,7 +935,7 @@ pub async fn patch_knowledge_entity(
let content_categories = User::get_user_categories(&user.id, &state.db).await?;
// Render updated list
Ok(TemplateResponse::new_template(
Ok(respond_with_graph_refresh(TemplateResponse::new_template(
"knowledge/entity_list.html",
EntityListData {
visible_entities,
@@ -332,7 +947,7 @@ pub async fn patch_knowledge_entity(
selected_content_category: None,
page_query: String::new(),
},
))
)))
}
pub async fn delete_knowledge_entity(
@@ -359,7 +974,7 @@ pub async fn delete_knowledge_entity(
// Get content categories
let content_categories = User::get_user_categories(&user.id, &state.db).await?;
Ok(TemplateResponse::new_template(
Ok(respond_with_graph_refresh(TemplateResponse::new_template(
"knowledge/entity_list.html",
EntityListData {
visible_entities,
@@ -371,13 +986,15 @@ pub async fn delete_knowledge_entity(
selected_content_category: None,
page_query: String::new(),
},
))
)))
}
#[derive(Serialize)]
pub struct RelationshipTableData {
entities: Vec<KnowledgeEntity>,
relationships: Vec<KnowledgeRelationship>,
relationships: Vec<RelationshipTableRow>,
relationship_type_options: Vec<String>,
default_relationship_type: String,
}
pub async fn delete_knowledge_relationship(
@@ -390,15 +1007,13 @@ pub async fn delete_knowledge_relationship(
let entities = User::get_knowledge_entities(&user.id, &state.db).await?;
let relationships = User::get_knowledge_relationships(&user.id, &state.db).await?;
let table_data = build_relationship_table_data(entities, relationships);
// Render updated list
Ok(TemplateResponse::new_template(
Ok(respond_with_graph_refresh(TemplateResponse::new_template(
"knowledge/relationship_table.html",
RelationshipTableData {
entities,
relationships,
},
))
table_data,
)))
}
#[derive(Deserialize)]
@@ -414,12 +1029,13 @@ pub async fn save_knowledge_relationship(
Form(form): Form<SaveKnowledgeRelationshipInput>,
) -> Result<impl IntoResponse, HtmlError> {
// Construct relationship
let relationship_type = canonicalize_relationship_type(&form.relationship_type);
let relationship = KnowledgeRelationship::new(
form.in_,
form.out,
user.id.clone(),
"manual".into(),
form.relationship_type,
relationship_type,
);
relationship.store_relationship(&state.db).await?;
@@ -427,13 +1043,11 @@ pub async fn save_knowledge_relationship(
let entities = User::get_knowledge_entities(&user.id, &state.db).await?;
let relationships = User::get_knowledge_relationships(&user.id, &state.db).await?;
let table_data = build_relationship_table_data(entities, relationships);
// Render updated list
Ok(TemplateResponse::new_template(
Ok(respond_with_graph_refresh(TemplateResponse::new_template(
"knowledge/relationship_table.html",
RelationshipTableData {
entities,
relationships,
},
))
table_data,
)))
}

View File

@@ -6,9 +6,10 @@ use axum::{
Router,
};
use handlers::{
delete_knowledge_entity, delete_knowledge_relationship, get_knowledge_graph_json,
patch_knowledge_entity, save_knowledge_relationship, show_edit_knowledge_entity_form,
show_knowledge_page,
create_knowledge_entity, delete_knowledge_entity, delete_knowledge_relationship,
get_knowledge_graph_json, patch_knowledge_entity, save_knowledge_relationship,
show_edit_knowledge_entity_form, show_knowledge_page, show_new_knowledge_entity_form,
suggest_knowledge_relationships,
};
use crate::html_state::HtmlState;
@@ -21,12 +22,18 @@ where
Router::new()
.route("/knowledge", get(show_knowledge_page))
.route("/knowledge/graph.json", get(get_knowledge_graph_json))
.route("/knowledge-entity/new", get(show_new_knowledge_entity_form))
.route("/knowledge-entity", post(create_knowledge_entity))
.route(
"/knowledge-entity/{id}",
get(show_edit_knowledge_entity_form)
.delete(delete_knowledge_entity)
.patch(patch_knowledge_entity),
)
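// Relationship suggestions for the new-entity modal; returns the relationship selector partial.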
.route(
"/knowledge-entity/suggestions",
post(suggest_knowledge_relationships),
)
.route("/knowledge-relationship", post(save_knowledge_relationship))
.route(
"/knowledge-relationship/{id}",

View File

@@ -6,4 +6,5 @@ pub mod content;
pub mod index;
pub mod ingestion;
pub mod knowledge;
pub mod scratchpad;
pub mod search;

View File

@@ -0,0 +1,557 @@
use axum::{
extract::{Path, Query, State},
http::{HeaderValue, StatusCode},
response::{IntoResponse, Response},
Form,
};
use axum_htmx::{HxBoosted, HxRequest, HX_TRIGGER};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use crate::html_state::HtmlState;
use crate::middlewares::{
auth_middleware::RequireUser,
response_middleware::{HtmlError, TemplateResponse},
};
use common::storage::types::{
conversation::Conversation, ingestion_payload::IngestionPayload, ingestion_task::IngestionTask,
scratchpad::Scratchpad, user::User,
};
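// HTMX handlers for the scratchpad feature: listing, a modal editor with auto-save, title
// editing, archive/restore, and queuing a scratchpad for ingestion into the knowledge base.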
#[derive(Serialize)]
pub struct ScratchpadPageData {
user: User,
scratchpads: Vec<ScratchpadListItem>,
archived_scratchpads: Vec<ScratchpadArchiveItem>,
conversation_archive: Vec<Conversation>,
#[serde(skip_serializing_if = "Option::is_none")]
new_scratchpad: Option<ScratchpadDetail>,
}
#[derive(Serialize)]
pub struct ScratchpadListItem {
id: String,
title: String,
content: String,
last_saved_at: DateTime<Utc>,
}
#[derive(Serialize)]
pub struct ScratchpadDetailData {
user: User,
scratchpad: ScratchpadDetail,
conversation_archive: Vec<Conversation>,
}
#[derive(Serialize)]
pub struct ScratchpadArchiveItem {
id: String,
title: String,
archived_at: Option<DateTime<Utc>>,
#[serde(skip_serializing_if = "Option::is_none")]
ingested_at: Option<DateTime<Utc>>,
}
#[derive(Serialize)]
pub struct ScratchpadDetail {
id: String,
title: String,
content: String,
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
last_saved_at: DateTime<Utc>,
is_dirty: bool,
}
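// Auto-save response: a success flag plus the last-saved timestamp in display form and as RFC 3339.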
#[derive(Serialize)]
pub struct AutoSaveResponse {
success: bool,
last_saved_at_display: String,
last_saved_at_iso: String,
}
impl From<&Scratchpad> for ScratchpadListItem {
fn from(value: &Scratchpad) -> Self {
Self {
id: value.id.clone(),
title: value.title.clone(),
content: value.content.clone(),
last_saved_at: value.last_saved_at,
}
}
}
impl From<&Scratchpad> for ScratchpadArchiveItem {
fn from(value: &Scratchpad) -> Self {
Self {
id: value.id.clone(),
title: value.title.clone(),
archived_at: value.archived_at,
ingested_at: value.ingested_at,
}
}
}
impl From<&Scratchpad> for ScratchpadDetail {
fn from(value: &Scratchpad) -> Self {
Self {
id: value.id.clone(),
title: value.title.clone(),
content: value.content.clone(),
created_at: value.created_at,
updated_at: value.updated_at,
last_saved_at: value.last_saved_at,
is_dirty: value.is_dirty,
}
}
}
#[derive(Deserialize)]
pub struct CreateScratchpadForm {
title: String,
}
#[derive(Deserialize)]
pub struct UpdateScratchpadForm {
content: String,
}
#[derive(Deserialize)]
pub struct UpdateTitleForm {
title: String,
}
#[derive(Deserialize)]
pub struct EditTitleQuery {
edit_title: Option<bool>,
}
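// Full template on normal or hx-boosted navigation; plain HTMX requests get only the `main`
// block so the surrounding layout is not re-rendered.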
pub async fn show_scratchpad_page(
RequireUser(user): RequireUser,
HxRequest(is_htmx): HxRequest,
HxBoosted(is_boosted): HxBoosted,
State(state): State<HtmlState>,
) -> Result<impl IntoResponse, HtmlError> {
let scratchpads = Scratchpad::get_by_user(&user.id, &state.db).await?;
let archived_scratchpads = Scratchpad::get_archived_by_user(&user.id, &state.db).await?;
let conversation_archive = User::get_user_conversations(&user.id, &state.db).await?;
let scratchpad_list: Vec<ScratchpadListItem> =
scratchpads.iter().map(ScratchpadListItem::from).collect();
let archived_list: Vec<ScratchpadArchiveItem> = archived_scratchpads
.iter()
.map(ScratchpadArchiveItem::from)
.collect();
if is_htmx && !is_boosted {
Ok(TemplateResponse::new_partial(
"scratchpad/base.html",
"main",
ScratchpadPageData {
user,
scratchpads: scratchpad_list,
archived_scratchpads: archived_list,
conversation_archive,
new_scratchpad: None,
},
))
} else {
Ok(TemplateResponse::new_template(
"scratchpad/base.html",
ScratchpadPageData {
user,
scratchpads: scratchpad_list,
archived_scratchpads: archived_list,
conversation_archive,
new_scratchpad: None,
},
))
}
}
pub async fn show_scratchpad_modal(
RequireUser(user): RequireUser,
State(state): State<HtmlState>,
Path(scratchpad_id): Path<String>,
Query(query): Query<EditTitleQuery>,
) -> Result<impl IntoResponse, HtmlError> {
let scratchpad = Scratchpad::get_by_id(&scratchpad_id, &user.id, &state.db).await?;
let conversation_archive = User::get_user_conversations(&user.id, &state.db).await?;
let scratchpad_detail = ScratchpadDetail::from(&scratchpad);
// The edit_title query parameter is accepted but currently unused; reserved for a future inline title edit
let _ = query.edit_title.unwrap_or(false);
Ok(TemplateResponse::new_template(
"scratchpad/editor_modal.html",
ScratchpadDetailData {
user,
scratchpad: scratchpad_detail,
conversation_archive,
},
))
}
pub async fn create_scratchpad(
RequireUser(user): RequireUser,
State(state): State<HtmlState>,
Form(form): Form<CreateScratchpadForm>,
) -> Result<impl IntoResponse, HtmlError> {
let user_id = user.id.clone();
let scratchpad = Scratchpad::new(user_id.clone(), form.title);
let _stored = state.db.store_item(scratchpad.clone()).await?;
let scratchpads = Scratchpad::get_by_user(&user.id, &state.db).await?;
let archived_scratchpads = Scratchpad::get_archived_by_user(&user.id, &state.db).await?;
let conversation_archive = User::get_user_conversations(&user.id, &state.db).await?;
let scratchpad_list: Vec<ScratchpadListItem> =
scratchpads.iter().map(ScratchpadListItem::from).collect();
let archived_list: Vec<ScratchpadArchiveItem> = archived_scratchpads
.iter()
.map(ScratchpadArchiveItem::from)
.collect();
Ok(TemplateResponse::new_partial(
"scratchpad/base.html",
"main",
ScratchpadPageData {
user,
scratchpads: scratchpad_list,
archived_scratchpads: archived_list,
conversation_archive,
new_scratchpad: Some(ScratchpadDetail::from(&scratchpad)),
},
))
}
pub async fn auto_save_scratchpad(
RequireUser(user): RequireUser,
State(state): State<HtmlState>,
Path(scratchpad_id): Path<String>,
Form(form): Form<UpdateScratchpadForm>,
) -> Result<impl IntoResponse, HtmlError> {
let updated =
Scratchpad::update_content(&scratchpad_id, &user.id, &form.content, &state.db).await?;
// Return a success indicator for auto-save
Ok(axum::Json(AutoSaveResponse {
success: true,
last_saved_at_display: updated
.last_saved_at
.format("%Y-%m-%d %H:%M:%S")
.to_string(),
last_saved_at_iso: updated.last_saved_at.to_rfc3339(),
}))
}
pub async fn update_scratchpad_title(
RequireUser(user): RequireUser,
State(state): State<HtmlState>,
Path(scratchpad_id): Path<String>,
Form(form): Form<UpdateTitleForm>,
) -> Result<impl IntoResponse, HtmlError> {
Scratchpad::update_title(&scratchpad_id, &user.id, &form.title, &state.db).await?;
let scratchpad = Scratchpad::get_by_id(&scratchpad_id, &user.id, &state.db).await?;
let conversation_archive = User::get_user_conversations(&user.id, &state.db).await?;
Ok(TemplateResponse::new_template(
"scratchpad/editor_modal.html",
ScratchpadDetailData {
user,
scratchpad: ScratchpadDetail::from(&scratchpad),
conversation_archive,
},
))
}
pub async fn delete_scratchpad(
RequireUser(user): RequireUser,
State(state): State<HtmlState>,
Path(scratchpad_id): Path<String>,
) -> Result<impl IntoResponse, HtmlError> {
Scratchpad::delete(&scratchpad_id, &user.id, &state.db).await?;
// Return the updated main section content
let scratchpads = Scratchpad::get_by_user(&user.id, &state.db).await?;
let conversation_archive = User::get_user_conversations(&user.id, &state.db).await?;
let archived_scratchpads = Scratchpad::get_archived_by_user(&user.id, &state.db).await?;
let scratchpad_list: Vec<ScratchpadListItem> =
scratchpads.iter().map(ScratchpadListItem::from).collect();
let archived_list: Vec<ScratchpadArchiveItem> = archived_scratchpads
.iter()
.map(ScratchpadArchiveItem::from)
.collect();
Ok(TemplateResponse::new_partial(
"scratchpad/base.html",
"main",
ScratchpadPageData {
user,
scratchpads: scratchpad_list,
archived_scratchpads: archived_list,
conversation_archive,
new_scratchpad: None,
},
))
}
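// Queues the scratchpad content for ingestion and archives it. Feedback is delivered as a toast
// via the HTMX `HX-Trigger` response header; empty scratchpads are rejected with 400 Bad Request.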
pub async fn ingest_scratchpad(
RequireUser(user): RequireUser,
State(state): State<HtmlState>,
Path(scratchpad_id): Path<String>,
) -> Result<impl IntoResponse, HtmlError> {
let scratchpad = Scratchpad::get_by_id(&scratchpad_id, &user.id, &state.db).await?;
if scratchpad.content.trim().is_empty() {
let trigger_payload = serde_json::json!({
"toast": {
"title": "Ingestion skipped",
"description": "Cannot ingest an empty scratchpad.",
"type": "warning"
}
});
let trigger_value = serde_json::to_string(&trigger_payload).unwrap_or_else(|_| {
r#"{"toast":{"title":"Ingestion skipped","description":"Cannot ingest an empty scratchpad.","type":"warning"}}"#.to_string()
});
let mut response = Response::builder()
.status(StatusCode::BAD_REQUEST)
.body(axum::body::Body::empty())
.unwrap_or_else(|_| Response::new(axum::body::Body::empty()));
if let Ok(header_value) = HeaderValue::from_str(&trigger_value) {
response.headers_mut().insert(HX_TRIGGER, header_value);
}
return Ok(response);
}
// Create ingestion task
let payload = IngestionPayload::Text {
text: scratchpad.content.clone(),
context: format!("Scratchpad: {}", scratchpad.title),
category: "scratchpad".to_string(),
user_id: user.id.clone(),
};
let task = IngestionTask::new(payload, user.id.clone());
state.db.store_item(task).await?;
// Archive the scratchpad once queued for ingestion
Scratchpad::archive(&scratchpad_id, &user.id, &state.db, true).await?;
let scratchpads = Scratchpad::get_by_user(&user.id, &state.db).await?;
let archived_scratchpads = Scratchpad::get_archived_by_user(&user.id, &state.db).await?;
let conversation_archive = User::get_user_conversations(&user.id, &state.db).await?;
let scratchpad_list: Vec<ScratchpadListItem> =
scratchpads.iter().map(ScratchpadListItem::from).collect();
let archived_list: Vec<ScratchpadArchiveItem> = archived_scratchpads
.iter()
.map(ScratchpadArchiveItem::from)
.collect();
let trigger_payload = serde_json::json!({
"toast": {
"title": "Ingestion queued",
"description": format!("\"{}\" archived and added to the ingestion queue.", scratchpad.title),
"type": "success"
}
});
let trigger_value = serde_json::to_string(&trigger_payload).unwrap_or_else(|_| {
r#"{"toast":{"title":"Ingestion queued","description":"Scratchpad archived and added to the ingestion queue.","type":"success"}}"#.to_string()
});
let template_response = TemplateResponse::new_partial(
"scratchpad/base.html",
"main",
ScratchpadPageData {
user,
scratchpads: scratchpad_list,
archived_scratchpads: archived_list,
conversation_archive,
new_scratchpad: None,
},
);
let mut response = template_response.into_response();
if let Ok(header_value) = HeaderValue::from_str(&trigger_value) {
response.headers_mut().insert(HX_TRIGGER, header_value);
}
Ok(response)
}
pub async fn archive_scratchpad(
RequireUser(user): RequireUser,
State(state): State<HtmlState>,
Path(scratchpad_id): Path<String>,
) -> Result<impl IntoResponse, HtmlError> {
Scratchpad::archive(&scratchpad_id, &user.id, &state.db, false).await?;
let scratchpads = Scratchpad::get_by_user(&user.id, &state.db).await?;
let archived_scratchpads = Scratchpad::get_archived_by_user(&user.id, &state.db).await?;
let conversation_archive = User::get_user_conversations(&user.id, &state.db).await?;
let scratchpad_list: Vec<ScratchpadListItem> =
scratchpads.iter().map(ScratchpadListItem::from).collect();
let archived_list: Vec<ScratchpadArchiveItem> = archived_scratchpads
.iter()
.map(ScratchpadArchiveItem::from)
.collect();
Ok(TemplateResponse::new_template(
"scratchpad/base.html",
ScratchpadPageData {
user,
scratchpads: scratchpad_list,
archived_scratchpads: archived_list,
conversation_archive,
new_scratchpad: None,
},
))
}
#[cfg(test)]
mod tests {
use super::*;
use chrono::Utc;
#[test]
fn test_scratchpad_list_item_conversion() {
// Create a test scratchpad with datetime values
let now = Utc::now();
let mut scratchpad = common::storage::types::scratchpad::Scratchpad::new(
"test_user".to_string(),
"Test Scratchpad".to_string(),
);
// Override the timestamps with known values for testing
scratchpad.last_saved_at = now;
// Test conversion to ScratchpadListItem
let list_item = ScratchpadListItem::from(&scratchpad);
assert_eq!(list_item.id, scratchpad.id);
assert_eq!(list_item.title, scratchpad.title);
assert_eq!(list_item.content, scratchpad.content);
assert_eq!(list_item.last_saved_at, scratchpad.last_saved_at);
}
#[test]
fn test_scratchpad_detail_conversion() {
// Create a test scratchpad with datetime values
let now = Utc::now();
let mut scratchpad = common::storage::types::scratchpad::Scratchpad::new(
"test_user".to_string(),
"Test Scratchpad".to_string(),
);
// Override the timestamps with known values for testing
scratchpad.last_saved_at = now;
// Test conversion to ScratchpadDetail
let detail = ScratchpadDetail::from(&scratchpad);
assert_eq!(detail.id, scratchpad.id);
assert_eq!(detail.title, scratchpad.title);
assert_eq!(detail.content, scratchpad.content);
assert_eq!(detail.created_at, scratchpad.created_at);
assert_eq!(detail.updated_at, scratchpad.updated_at);
assert_eq!(detail.last_saved_at, scratchpad.last_saved_at);
assert_eq!(detail.is_dirty, scratchpad.is_dirty);
}
#[test]
fn test_scratchpad_archive_item_conversion() {
// Create a test scratchpad with optional datetime values
let now = Utc::now();
let mut scratchpad = common::storage::types::scratchpad::Scratchpad::new(
"test_user".to_string(),
"Test Scratchpad".to_string(),
);
// Set optional datetime fields
scratchpad.archived_at = Some(now);
scratchpad.ingested_at = Some(now);
// Test conversion to ScratchpadArchiveItem
let archive_item = ScratchpadArchiveItem::from(&scratchpad);
assert_eq!(archive_item.id, scratchpad.id);
assert_eq!(archive_item.title, scratchpad.title);
assert_eq!(archive_item.archived_at, scratchpad.archived_at);
assert_eq!(archive_item.ingested_at, scratchpad.ingested_at);
}
#[test]
fn test_scratchpad_archive_item_conversion_with_none_values() {
// Create a test scratchpad without optional datetime values
let scratchpad = common::storage::types::scratchpad::Scratchpad::new(
"test_user".to_string(),
"Test Scratchpad".to_string(),
);
// Test conversion to ScratchpadArchiveItem
let archive_item = ScratchpadArchiveItem::from(&scratchpad);
assert_eq!(archive_item.id, scratchpad.id);
assert_eq!(archive_item.title, scratchpad.title);
assert_eq!(archive_item.archived_at, None);
assert_eq!(archive_item.ingested_at, None);
}
}
pub async fn restore_scratchpad(
RequireUser(user): RequireUser,
State(state): State<HtmlState>,
Path(scratchpad_id): Path<String>,
) -> Result<impl IntoResponse, HtmlError> {
Scratchpad::restore(&scratchpad_id, &user.id, &state.db).await?;
let scratchpads = Scratchpad::get_by_user(&user.id, &state.db).await?;
let archived_scratchpads = Scratchpad::get_archived_by_user(&user.id, &state.db).await?;
let conversation_archive = User::get_user_conversations(&user.id, &state.db).await?;
let scratchpad_list: Vec<ScratchpadListItem> =
scratchpads.iter().map(ScratchpadListItem::from).collect();
let archived_list: Vec<ScratchpadArchiveItem> = archived_scratchpads
.iter()
.map(ScratchpadArchiveItem::from)
.collect();
let trigger_payload = serde_json::json!({
"toast": {
"title": "Scratchpad restored",
"description": "The scratchpad is back in your active list.",
"type": "info"
}
});
let trigger_value = serde_json::to_string(&trigger_payload).unwrap_or_else(|_| {
r#"{"toast":{"title":"Scratchpad restored","description":"The scratchpad is back in your active list.","type":"info"}}"#.to_string()
});
let template_response = TemplateResponse::new_partial(
"scratchpad/base.html",
"main",
ScratchpadPageData {
user,
scratchpads: scratchpad_list,
archived_scratchpads: archived_list,
conversation_archive,
new_scratchpad: None,
},
);
let mut response = template_response.into_response();
if let Ok(header_value) = HeaderValue::from_str(&trigger_value) {
response.headers_mut().insert(HX_TRIGGER, header_value);
}
Ok(response)
}

View File

@@ -0,0 +1,40 @@
mod handlers;
use axum::{
extract::FromRef,
routing::{delete, get, patch, post},
Router,
};
use crate::html_state::HtmlState;
pub fn router<S>() -> Router<S>
where
S: Clone + Send + Sync + 'static,
HtmlState: FromRef<S>,
{
Router::new()
.route("/scratchpad", get(handlers::show_scratchpad_page))
.route("/scratchpad", post(handlers::create_scratchpad))
.route(
"/scratchpad/{id}/modal",
get(handlers::show_scratchpad_modal),
)
.route(
"/scratchpad/{id}/auto-save",
patch(handlers::auto_save_scratchpad),
)
.route(
"/scratchpad/{id}/title",
patch(handlers::update_scratchpad_title),
)
.route("/scratchpad/{id}", delete(handlers::delete_scratchpad))
.route(
"/scratchpad/{id}/archive",
post(handlers::archive_scratchpad),
)
.route("/scratchpad/{id}/ingest", post(handlers::ingest_scratchpad))
.route(
"/scratchpad/{id}/restore",
post(handlers::restore_scratchpad),
)
}

View File

@@ -6,9 +6,11 @@ use axum::{
};
use common::storage::types::{
conversation::Conversation,
knowledge_entity::{KnowledgeEntity, KnowledgeEntitySearchResult},
text_content::{TextContent, TextContentSearchResult},
user::User,
};
use futures::future::try_join;
use serde::{de, Deserialize, Deserializer, Serialize};
use crate::{
@@ -43,9 +45,19 @@ pub async fn search_result_handler(
Query(params): Query<SearchParams>,
RequireUser(user): RequireUser,
) -> Result<impl IntoResponse, HtmlError> {
#[derive(Serialize)]
struct SearchResultForTemplate {
result_type: String,
score: f32,
#[serde(skip_serializing_if = "Option::is_none")]
text_content: Option<TextContentSearchResult>,
#[serde(skip_serializing_if = "Option::is_none")]
knowledge_entity: Option<KnowledgeEntitySearchResult>,
}
#[derive(Serialize)]
pub struct AnswerData {
search_result: Vec<TextContentSearchResult>,
search_result: Vec<SearchResultForTemplate>,
query_param: String,
user: User,
conversation_archive: Vec<Conversation>,
@@ -56,17 +68,45 @@ pub async fn search_result_handler(
if let Some(actual_query) = params.query {
let trimmed_query = actual_query.trim();
if trimmed_query.is_empty() {
(Vec::new(), String::new())
(Vec::<SearchResultForTemplate>::new(), String::new())
} else {
match TextContent::search(&state.db, trimmed_query, &user.id, 5).await {
Ok(results) => (results, trimmed_query.to_string()),
Err(e) => {
return Err(HtmlError::from(e));
}
const TOTAL_LIMIT: usize = 10;
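// Run the text-content and knowledge-entity searches concurrently, merge the hits,
// sort by score (descending), and keep the top TOTAL_LIMIT results for the template.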
let (text_results, entity_results) = try_join(
TextContent::search(&state.db, trimmed_query, &user.id, TOTAL_LIMIT),
KnowledgeEntity::search(&state.db, trimmed_query, &user.id, TOTAL_LIMIT),
)
.await?;
let mut combined_results: Vec<SearchResultForTemplate> =
Vec::with_capacity(text_results.len() + entity_results.len());
for text_result in text_results {
let score = text_result.score;
combined_results.push(SearchResultForTemplate {
result_type: "text_content".to_string(),
score,
text_content: Some(text_result),
knowledge_entity: None,
});
}
for entity_result in entity_results {
let score = entity_result.score;
combined_results.push(SearchResultForTemplate {
result_type: "knowledge_entity".to_string(),
score,
text_content: None,
knowledge_entity: Some(entity_result),
});
}
combined_results.sort_by(|a, b| b.score.total_cmp(&a.score));
combined_results.truncate(TOTAL_LIMIT);
(combined_results, trimmed_query.to_string())
}
} else {
(Vec::new(), String::new())
(Vec::<SearchResultForTemplate>::new(), String::new())
};
Ok(TemplateResponse::new_template(

View File

@@ -16,7 +16,7 @@ pub struct Pagination {
}
impl Pagination {
pub fn new(
pub const fn new(
current_page: usize,
per_page: usize,
total_items: usize,

View File

@@ -3,154 +3,49 @@
{% block title %}Minne - Admin{% endblock %}
{% block main %}
<div class="flex justify-center grow mt-2 sm:mt-4 pb-4">
<div class="container">
<section class="mb-4">
<div class="nb-panel p-3 flex items-center justify-between">
<h1 class="text-xl font-extrabold tracking-tight">Admin Dashboard</h1>
<div id="admin-shell" class="flex justify-center grow mt-2 sm:mt-4 pb-4">
<div class="container flex flex-col gap-4">
<section class="nb-panel p-4 sm:p-5 flex flex-col gap-3 sm:flex-row sm:items-start sm:justify-between">
<div>
<h1 class="text-xl font-extrabold tracking-tight">Admin Controls</h1>
<p class="text-sm opacity-70 max-w-2xl">
Stay on top of analytics and manage AI integrations without waiting on long-running model calls.
</p>
</div>
<div class="text-xs opacity-60 sm:text-right">
Signed in as <span class="font-medium">{{ user.email }}</span>
</div>
</section>
<section class="mb-4">
<div class="grid grid-cols-1 sm:grid-cols-3 gap-4">
<div class="nb-stat">
<div class="text-xs opacity-70">Page Loads</div>
<div class="text-3xl font-extrabold">{{analytics.page_loads}}</div>
<div class="text-xs opacity-60">Total page load events</div>
</div>
<div class="nb-stat">
<div class="text-xs opacity-70">Unique Visitors</div>
<div class="text-3xl font-extrabold">{{analytics.visitors}}</div>
<div class="text-xs opacity-60">Distinct users by fingerprint</div>
</div>
<div class="nb-stat">
<div class="text-xs opacity-70">Users</div>
<div class="text-3xl font-extrabold">{{users}}</div>
<div class="text-xs opacity-60">Registered accounts</div>
</div>
</div>
</section>
<nav
class="nb-panel p-2 flex flex-wrap gap-2 text-sm"
hx-boost="true"
hx-target="#admin-shell"
hx-select="#admin-shell"
hx-swap="outerHTML"
hx-push-url="true"
>
<a
href="/admin?section=overview"
class="nb-btn btn-sm px-4 {% if current_section == 'overview' %}nb-cta{% else %}btn-ghost{% endif %}"
>
Overview
</a>
<a
href="/admin?section=models"
class="nb-btn btn-sm px-4 {% if current_section == 'models' %}nb-cta{% else %}btn-ghost{% endif %}"
>
Models
</a>
</nav>
<section class="grid grid-cols-1 xl:grid-cols-2 gap-4">
{% block system_prompt_section %}
<div id="system_prompt_section" class="nb-panel p-4">
<div class="text-sm font-semibold mb-3">System Prompts</div>
<div class="flex gap-2 flex-col sm:flex-row">
<button type="button" class="nb-btn btn-sm" hx-get="/edit-query-prompt" hx-target="#modal" hx-swap="innerHTML">Edit Query Prompt</button>
<button type="button" class="nb-btn btn-sm" hx-get="/edit-ingestion-prompt" hx-target="#modal" hx-swap="innerHTML">Edit Ingestion Prompt</button>
<button type="button" class="nb-btn btn-sm" hx-get="/edit-image-prompt" hx-target="#modal" hx-swap="innerHTML">Edit Image Prompt</button>
</div>
</div>
{% endblock %}
<div class="nb-panel p-4">
<div class="text-sm font-semibold mb-3">AI Models</div>
{% block model_settings_form %}
<form hx-patch="/update-model-settings" hx-swap="outerHTML" class="grid grid-cols-1 gap-4">
<!-- Query Model -->
<div>
<div class="text-sm opacity-80 mb-1">Query Model</div>
<select name="query_model" class="nb-select w-full">
{% for model in available_models.data %}
<option value="{{model.id}}" {% if settings.query_model==model.id %} selected {% endif %}>{{model.id}}</option>
{% endfor %}
</select>
<p class="text-xs opacity-70 mt-1">Current: <span class="font-mono">{{settings.query_model}}</span></p>
</div>
<!-- Processing Model -->
<div>
<div class="text-sm opacity-80 mb-1">Processing Model</div>
<select name="processing_model" class="nb-select w-full">
{% for model in available_models.data %}
<option value="{{model.id}}" {% if settings.processing_model==model.id %} selected {% endif %}>{{model.id}}</option>
{% endfor %}
</select>
<p class="text-xs opacity-70 mt-1">Current: <span class="font-mono">{{settings.processing_model}}</span></p>
</div>
<!-- Image Processing Model -->
<div>
<div class="text-sm opacity-80 mb-1">Image Processing Model</div>
<select name="image_processing_model" class="nb-select w-full">
{% for model in available_models.data %}
<option value="{{model.id}}" {% if settings.image_processing_model==model.id %} selected {% endif %}>{{model.id}}</option>
{% endfor %}
</select>
<p class="text-xs opacity-70 mt-1">Current: <span class="font-mono">{{settings.image_processing_model}}</span></p>
</div>
<!-- Voice Processing Model -->
<div>
<div class="text-sm opacity-80 mb-1">Voice Processing Model</div>
<select name="voice_processing_model" class="nb-select w-full">
{% for model in available_models.data %}
<option value="{{model.id}}" {% if settings.voice_processing_model==model.id %} selected {% endif %}>{{model.id}}</option>
{% endfor %}
</select>
<p class="text-xs opacity-70 mt-1">Current: <span class="font-mono">{{settings.voice_processing_model}}</span></p>
</div>
<!-- Embedding Model -->
<div>
<div class="text-sm opacity-80 mb-1">Embedding Model</div>
<select name="embedding_model" class="nb-select w-full">
{% for model in available_models.data %}
<option value="{{model.id}}" {% if settings.embedding_model==model.id %} selected {% endif %}>{{model.id}}</option>
{% endfor %}
</select>
<p class="text-xs opacity-70 mt-1">Current: <span class="font-mono">{{settings.embedding_model}} ({{settings.embedding_dimensions}} dims)</span></p>
</div>
<!-- Embedding Dimensions -->
<div>
<div class="text-sm opacity-80 mb-1" for="embedding_dimensions">Embedding Dimensions</div>
<input type="number" id="embedding_dimensions" name="embedding_dimensions" class="nb-input w-full" value="{{ settings.embedding_dimensions }}" required />
</div>
<!-- Alert -->
<div id="embedding-change-alert" class="nb-panel p-3 bg-warning/20 hidden">
<div class="text-sm"><strong>Warning:</strong> Changing dimensions will require re-creating all embeddings. Look up your model's required dimensions or use a model that allows specifying them.</div>
</div>
<div class="flex justify-end">
<button type="submit" class="nb-btn nb-cta btn-sm">Save Model Settings</button>
</div>
</form>
<script>
// Rebind after HTMX swaps
(() => {
const dimensionInput = document.getElementById('embedding_dimensions');
const alertElement = document.getElementById('embedding-change-alert');
const initialDimensions = '{{ settings.embedding_dimensions }}';
if (dimensionInput && alertElement) {
dimensionInput.addEventListener('input', (event) => {
if (String(event.target.value) !== String(initialDimensions)) {
alertElement.classList.remove('hidden');
} else {
alertElement.classList.add('hidden');
}
});
}
})();
</script>
{% endblock %}
</div>
<div class="nb-panel p-4">
<div class="text-sm font-semibold mb-3">Registration</div>
<label class="flex items-center gap-3">
{% block registration_status_input %}
<form hx-patch="/toggle-registrations" hx-swap="outerHTML" hx-trigger="change">
<input name="registration_open" type="checkbox" class="nb-checkbox" {% if settings.registrations_enabled %}checked{% endif %} />
</form>
{% endblock %}
<span class="text-sm">Enable Registrations</span>
</label>
<div id="registration-status" class="text-xs opacity-70 mt-2"></div>
</div>
</section>
<div id="admin-content" class="flex flex-col gap-4">
{% if current_section == 'models' %}
{% include 'admin/sections/models.html' %}
{% else %}
{% include 'admin/sections/overview.html' %}
{% endif %}
</div>
</div>
</div>
{% endblock %}

View File

@@ -0,0 +1,130 @@
<section class="nb-panel p-4 sm:p-5 flex flex-col gap-4">
<div class="flex items-start justify-between flex-col sm:flex-row gap-3">
<div>
<div class="text-sm uppercase tracking-wide opacity-60 mb-1">AI Models</div>
<h2 class="text-lg font-semibold">Model configuration</h2>
<p class="text-xs opacity-70 max-w-2xl">
Choose which models power conversational search, ingestion analysis, and embeddings. Adjusting embeddings may trigger a full reprocess.
</p>
</div>
<a
href="/admin?section=overview"
class="nb-btn btn-sm btn-ghost"
hx-boost="true"
hx-target="#admin-shell"
hx-select="#admin-shell"
hx-swap="outerHTML"
hx-push-url="true"
>
← Back to Admin
</a>
</div>
{% if available_models %}
{% block model_settings_form %}
<form hx-patch="/update-model-settings" hx-swap="outerHTML" class="grid grid-cols-1 gap-4">
<div class="grid grid-cols-1 sm:grid-cols-2 gap-4">
<div>
<div class="text-sm opacity-80 mb-1">Query Model</div>
<select name="query_model" class="nb-select w-full">
{% for model in available_models.data %}
<option value="{{ model.id }}" {% if settings.query_model == model.id %}selected{% endif %}>{{ model.id }}</option>
{% endfor %}
</select>
<p class="text-xs opacity-70 mt-1">Current: <span class="font-mono">{{ settings.query_model }}</span></p>
</div>
<div>
<div class="text-sm opacity-80 mb-1">Processing Model</div>
<select name="processing_model" class="nb-select w-full">
{% for model in available_models.data %}
<option value="{{ model.id }}" {% if settings.processing_model == model.id %}selected{% endif %}>{{ model.id }}</option>
{% endfor %}
</select>
<p class="text-xs opacity-70 mt-1">Current: <span class="font-mono">{{ settings.processing_model }}</span></p>
</div>
</div>
<div class="grid grid-cols-1 sm:grid-cols-2 gap-4">
<div>
<div class="text-sm opacity-80 mb-1">Image Processing Model</div>
<select name="image_processing_model" class="nb-select w-full">
{% for model in available_models.data %}
<option value="{{ model.id }}" {% if settings.image_processing_model == model.id %}selected{% endif %}>{{ model.id }}</option>
{% endfor %}
</select>
<p class="text-xs opacity-70 mt-1">Current: <span class="font-mono">{{ settings.image_processing_model }}</span></p>
</div>
<div>
<div class="text-sm opacity-80 mb-1">Voice Processing Model</div>
<select name="voice_processing_model" class="nb-select w-full">
{% for model in available_models.data %}
<option value="{{ model.id }}" {% if settings.voice_processing_model == model.id %}selected{% endif %}>{{ model.id }}</option>
{% endfor %}
</select>
<p class="text-xs opacity-70 mt-1">Current: <span class="font-mono">{{ settings.voice_processing_model }}</span></p>
</div>
</div>
<div class="grid grid-cols-1 sm:grid-cols-2 gap-4">
<div>
<div class="text-sm opacity-80 mb-1">Embedding Model</div>
<select name="embedding_model" class="nb-select w-full">
{% for model in available_models.data %}
<option value="{{ model.id }}" {% if settings.embedding_model == model.id %}selected{% endif %}>{{ model.id }}</option>
{% endfor %}
</select>
<p class="text-xs opacity-70 mt-1">Current: <span class="font-mono">{{ settings.embedding_model }}</span></p>
</div>
<div>
<label class="text-sm opacity-80 mb-1" for="embedding_dimensions">Embedding Dimensions</label>
<input
type="number"
id="embedding_dimensions"
name="embedding_dimensions"
class="nb-input w-full"
value="{{ settings.embedding_dimensions }}"
required
min="1"
/>
<p class="text-xs opacity-70 mt-1">Changing dimensions will trigger a background re-embedding.</p>
</div>
</div>
<div id="embedding-change-alert" class="nb-panel p-3 bg-warning/20 hidden">
<div class="text-sm">
<strong>Warning:</strong> Changing dimensions recreates embeddings for text chunks and knowledge entities. Confirm the target model requires the new value.
</div>
</div>
<div class="flex justify-end gap-2">
<button type="submit" class="nb-btn nb-cta btn-sm">Save Model Settings</button>
</div>
</form>
<script>
(() => {
const dimensionInput = document.getElementById('embedding_dimensions');
const alertElement = document.getElementById('embedding-change-alert');
const initialDimensions = '{{ settings.embedding_dimensions }}';
if (dimensionInput && alertElement) {
dimensionInput.addEventListener('input', (event) => {
if (String(event.target.value) !== String(initialDimensions)) {
alertElement.classList.remove('hidden');
} else {
alertElement.classList.add('hidden');
}
});
}
})();
</script>
{% endblock %}
{% else %}
<div class="nb-panel p-4 bg-warning/10 border border-warning/40">
<div class="text-sm font-semibold mb-1">Unable to load models</div>
<p class="text-xs opacity-70">We could not reach the model provider. Check the API key and retry.</p>
</div>
{% endif %}
</section>

View File

@@ -0,0 +1,57 @@
{% if analytics %}
<section class="grid grid-cols-1 sm:grid-cols-3 gap-4">
<div class="nb-stat">
<div class="text-xs opacity-70">Page Loads</div>
<div class="text-3xl font-extrabold">{{ analytics.page_loads }}</div>
<div class="text-xs opacity-60">Total load events seen by Minne</div>
</div>
<div class="nb-stat">
<div class="text-xs opacity-70">Unique Visitors</div>
<div class="text-3xl font-extrabold">{{ analytics.visitors }}</div>
<div class="text-xs opacity-60">Distinct users by fingerprint</div>
</div>
<div class="nb-stat">
<div class="text-xs opacity-70">Users</div>
<div class="text-3xl font-extrabold">{{ users or 0 }}</div>
<div class="text-xs opacity-60">Registered accounts</div>
</div>
</section>
{% else %}
<section class="nb-panel p-4">
<div class="text-sm font-semibold mb-2">Analytics unavailable</div>
<p class="text-xs opacity-70">We could not fetch analytics for this view. Reload or check the monitoring pipeline.</p>
</section>
{% endif %}
<section class="grid grid-cols-1 xl:grid-cols-2 gap-4">
{% block system_prompt_section %}
<div id="system_prompt_section" class="nb-panel p-4">
<div class="flex items-start justify-between gap-2 mb-3">
<div>
<div class="text-sm font-semibold">System Prompts</div>
<p class="text-xs opacity-70">Adjust the prompts that power retrieval, ingestion analysis, and image processing flows.</p>
</div>
<span class="text-[10px] uppercase tracking-wide opacity-60">LLM</span>
</div>
<div class="flex gap-2 flex-col sm:flex-row">
<button type="button" class="nb-btn btn-sm" hx-get="/edit-query-prompt" hx-target="#modal" hx-swap="innerHTML">Edit Query Prompt</button>
<button type="button" class="nb-btn btn-sm" hx-get="/edit-ingestion-prompt" hx-target="#modal" hx-swap="innerHTML">Edit Ingestion Prompt</button>
<button type="button" class="nb-btn btn-sm" hx-get="/edit-image-prompt" hx-target="#modal" hx-swap="innerHTML">Edit Image Prompt</button>
</div>
</div>
{% endblock %}
<div class="nb-panel p-4">
<div class="text-sm font-semibold mb-2">Registration</div>
<p class="text-xs opacity-60 mb-3">Toggle whether new people can sign up without an invite.</p>
<label class="flex items-center gap-3">
{% block registration_status_input %}
<form hx-patch="/toggle-registrations" hx-swap="outerHTML" hx-trigger="change">
<input name="registration_open" type="checkbox" class="nb-checkbox" {% if settings.registrations_enabled %}checked{% endif %} />
</form>
{% endblock %}
<span class="text-sm">Enable Registrations</span>
</label>
<div id="registration-status" class="text-xs opacity-70 mt-2"></div>
</div>
</section>

View File

@@ -2,10 +2,16 @@
<section id="active_jobs_section" class="nb-panel p-4 space-y-4 mt-6 sm:mt-8">
<header class="flex flex-wrap items-center justify-between gap-3">
<h2 class="text-xl font-extrabold tracking-tight">Active Tasks</h2>
<button class="nb-btn btn-square btn-sm" hx-get="/active-jobs" hx-target="#active_jobs_section" hx-swap="outerHTML"
aria-label="Refresh active tasks">
{% include "icons/refresh_icon.html" %}
</button>
<div class="flex gap-2">
<button class="nb-btn btn-square btn-sm" hx-get="/active-jobs" hx-target="#active_jobs_section" hx-swap="outerHTML"
aria-label="Refresh active tasks">
{% include "icons/refresh_icon.html" %}
</button>
<button class="nb-btn btn-sm" hx-get="/jobs/archive" hx-target="#modal" hx-swap="innerHTML"
aria-label="View task archive">
View Archive
</button>
</div>
</header>
{% if active_jobs %}
<ul class="flex flex-col gap-3 list-none p-0 m-0">
@@ -23,12 +29,18 @@
</div>
<div class="space-y-1">
<div class="text-sm font-semibold">
{% if item.status.name == "InProgress" %}
In progress, attempt {{ item.status.attempts }}
{% elif item.status.name == "Error" %}
Error: {{ item.status.message }}
{% if item.state == "Processing" %}
Processing, attempt {{ item.attempts }} of {{ item.max_attempts }}
{% elif item.state == "Reserved" %}
Reserved, attempt {{ item.attempts }} of {{ item.max_attempts }}
{% elif item.state == "Failed" %}
Retry scheduled (attempt {{ item.attempts }} of {{ item.max_attempts }}){% if item.error_message %}: {{ item.error_message }}{% endif %}
{% elif item.state == "DeadLetter" %}
Failed permanently{% if item.error_message %}: {{ item.error_message }}{% endif %}
{% elif item.state == "Succeeded" %}
Completed
{% else %}
{{ item.status.name }}
{{ item.state }}
{% endif %}
</div>
<div class="text-xs font-semibold opacity-60">
@@ -60,4 +72,4 @@
</ul>
{% endif %}
</section>
{% endblock %}
{% endblock %}

View File

@@ -8,7 +8,7 @@
</div>
<div class="space-y-1">
<div class="text-sm font-semibold flex gap-2 items-center">
<span sse-swap="status" hx-swap="innerHTML">Created</span>
<span sse-swap="status" hx-swap="innerHTML">Pending</span>
<div hx-get="/content/recent" hx-target="#latest_content_section" hx-swap="outerHTML"
hx-trigger="sse:update_latest_content"></div>
</div>

View File

@@ -0,0 +1,152 @@
{% extends "modal_base.html" %}
{% block modal_class %}w-11/12 max-w-[90ch] max-h-[95%] overflow-y-auto{% endblock %}
{% block form_attributes %}onsubmit="event.preventDefault();"{% endblock %}
{% block modal_content %}
<h3 class="text-xl font-extrabold tracking-tight flex items-center gap-2">
Ingestion Task Archive
<span class="badge badge-neutral text-xs font-normal">{{ tasks|length }} total</span>
</h3>
<p class="text-sm opacity-70">A history of all ingestion tasks for {{ user.email }}.</p>
{% if tasks %}
<div class="hidden lg:block overflow-x-auto nb-card mt-4">
<table class="nb-table">
<thead>
<tr>
<th class="text-left">Content</th>
<th class="text-left">State</th>
<th class="text-left">Attempts</th>
<th class="text-left">Scheduled</th>
<th class="text-left">Updated</th>
<th class="text-left">Worker</th>
<th class="text-left">Error</th>
</tr>
</thead>
<tbody>
{% for task in tasks %}
<tr>
<td>
<div class="flex flex-col gap-1">
<div class="text-sm font-semibold">{{ task.content_kind }}</div>
<div class="text-xs opacity-70 break-words">{{ task.content_summary }}</div>
<div class="text-[11px] opacity-60 lowercase tracking-wider">{{ task.id }}</div>
</div>
</td>
<td>
<span class="badge badge-primary badge-outline tracking-wide">{{ task.state_label }}</span>
</td>
<td>
<div class="text-sm font-semibold">{{ task.attempts }} / {{ task.max_attempts }}</div>
<div class="text-xs opacity-60">Priority {{ task.priority }}</div>
</td>
<td>
<div class="text-sm">
{{ task.scheduled_at|datetimeformat(format="short", tz=user.timezone) }}
</div>
{% if task.locked_at %}
<div class="text-xs opacity-60">Locked {{ task.locked_at|datetimeformat(format="short", tz=user.timezone) }}
</div>
{% endif %}
</td>
<td>
<div class="text-sm">
{{ task.updated_at|datetimeformat(format="short", tz=user.timezone) }}
</div>
<div class="text-xs opacity-60">Created {{ task.created_at|datetimeformat(format="short", tz=user.timezone) }}
</div>
</td>
<td>
{% if task.worker_id %}
<span class="text-sm font-semibold">{{ task.worker_id }}</span>
<div class="text-xs opacity-60">Lease {{ task.lease_duration_secs }}s</div>
{% else %}
<span class="text-xs opacity-60">Not assigned</span>
{% endif %}
</td>
<td>
{% if task.error_message %}
<div class="text-sm text-error font-semibold">{{ task.error_message }}</div>
{% if task.last_error_at %}
<div class="text-xs opacity-60">{{ task.last_error_at|datetimeformat(format="short", tz=user.timezone) }}
</div>
{% endif %}
{% else %}
<span class="text-xs opacity-60"></span>
{% endif %}
</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
<div class="lg:hidden flex flex-col gap-3 mt-4">
{% for task in tasks %}
<details class="nb-panel p-3 space-y-3">
<summary class="flex items-center justify-between gap-2 text-sm font-semibold cursor-pointer">
<span>{{ task.content_kind }}</span>
<span class="badge badge-primary badge-outline tracking-wide">{{ task.state_label }}</span>
</summary>
<div class="text-xs opacity-70 break-words">{{ task.content_summary }}</div>
<div class="text-[11px] opacity-60 lowercase tracking-wider">{{ task.id }}</div>
<div class="grid grid-cols-1 gap-2 text-xs">
<div class="flex justify-between">
<span class="opacity-60 uppercase tracking-wide">Attempts</span>
<span class="text-sm font-semibold">{{ task.attempts }} / {{ task.max_attempts }}</span>
</div>
<div class="flex justify-between">
<span class="opacity-60 uppercase tracking-wide">Priority</span>
<span class="text-sm font-semibold">{{ task.priority }}</span>
</div>
<div class="flex justify-between">
<span class="opacity-60 uppercase tracking-wide">Scheduled</span>
<span>{{ task.scheduled_at|datetimeformat(format="short", tz=user.timezone) }}</span>
</div>
<div class="flex justify-between">
<span class="opacity-60 uppercase tracking-wide">Updated</span>
<span>{{ task.updated_at|datetimeformat(format="short", tz=user.timezone) }}</span>
</div>
<div class="flex justify-between">
<span class="opacity-60 uppercase tracking-wide">Created</span>
<span>{{ task.created_at|datetimeformat(format="short", tz=user.timezone) }}</span>
</div>
<div class="flex justify-between">
<span class="opacity-60 uppercase tracking-wide">Worker</span>
{% if task.worker_id %}
<span class="text-sm font-semibold">{{ task.worker_id }}</span>
{% else %}
<span class="opacity-60">Unassigned</span>
{% endif %}
</div>
<div class="flex justify-between">
<span class="opacity-60 uppercase tracking-wide">Lease</span>
<span>{{ task.lease_duration_secs }}s</span>
</div>
{% if task.locked_at %}
<div class="flex justify-between">
<span class="opacity-60 uppercase tracking-wide">Locked</span>
<span>{{ task.locked_at|datetimeformat(format="short", tz=user.timezone) }}</span>
</div>
{% endif %}
</div>
{% if task.error_message or task.last_error_at %}
<div class="border-t border-base-200 pt-2 text-xs space-y-1">
{% if task.error_message %}
<div class="text-sm text-error font-semibold">{{ task.error_message }}</div>
{% endif %}
{% if task.last_error_at %}
<div class="opacity-60">Last error {{ task.last_error_at|datetimeformat(format="short", tz=user.timezone) }}</div>
{% endif %}
</div>
{% endif %}
</details>
{% endfor %}
</div>
{% else %}
<p class="text-sm opacity-70 mt-4">No tasks yet. Start an ingestion to populate the archive.</p>
{% endif %}
{% endblock %}
{% block primary_actions %}{% endblock %}

View File

@@ -6,6 +6,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
<title>{% block title %}Minne{% endblock %}</title>
<!-- Preload critical assets -->
<link rel="preload" href="/assets/htmx.min.js" as="script">
<link rel="preload" href="/assets/style.css" as="style">
@@ -77,4 +78,4 @@
window.renderAllMarkdown = renderAllMarkdown;
</script>
</html>
</html>

View File

@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor"
width="20" height="20" class="size-6">
<path stroke-linecap="round" stroke-linejoin="round"
d="M3 17.25V21h3.75L17.81 9.94l-3.75-3.75L3 17.25zM20.71 7.04c.39-.39.39-1.02 0-1.41l-2.34-2.34c-.39-.39-1.02-.39-1.41 0l-1.83 1.83 3.75 3.75 1.83-1.83z" />
</svg>


View File

@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor"
width="20" height="20" class="size-6">
<path stroke-linecap="round" stroke-linejoin="round"
d="M3 17.25V21h3.75L17.81 9.94l-3.75-3.75L3 17.25zM20.71 7.04c.39-.39.39-1.02 0-1.41l-2.34-2.34c-.39-.39-1.02-.39-1.41 0l-1.83 1.83 3.75 3.75 1.83-1.83z" />
</svg>


View File

@@ -5,8 +5,14 @@
{% block main %}
<div id="knowledge_pane" class="flex justify-center grow mt-2 sm:mt-4 gap-6">
<div class="container">
<div class="nb-panel p-3 mb-4 flex flex-col sm:flex-row justify-between items-start sm:items-center">
<h2 class="text-xl font-extrabold tracking-tight">Knowledge Entities</h2>
<div class="nb-panel p-3 mb-4 space-y-3 sm:space-y-0 sm:flex sm:flex-row sm:justify-between sm:items-center">
<div class="flex flex-col gap-2 sm:flex-row sm:items-center sm:gap-3">
<h2 class="text-xl font-extrabold tracking-tight">Knowledge Entities</h2>
<button type="button" class="nb-btn nb-cta btn-sm mr-2" hx-get="/knowledge-entity/new" hx-target="#modal"
hx-swap="innerHTML">
New Entity
</button>
</div>
<form hx-get="/knowledge" hx-target="#knowledge_pane" hx-push-url="true" hx-swap="outerHTML"
class="flex items-center gap-2 mt-2 sm:mt-0">
<input type="hidden" name="page" value="1" />

View File

@@ -0,0 +1,79 @@
{% extends "modal_base.html" %}
{% block modal_class %}max-w-4xl w-full{% endblock %}
{% block form_attributes %}
hx-post="/knowledge-entity"
hx-target="#knowledge_pane"
hx-swap="outerHTML"
{% endblock %}
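{# The form posts to /knowledge-entity and swaps the whole #knowledge_pane; the
   "Suggest Relationships" button below only swaps the #relationship-list partial. #}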
{% block modal_content %}
<h3 class="text-xl font-extrabold tracking-tight">Create Knowledge Entity</h3>
<div class="flex flex-col gap-3">
<label class="w-full">
<div class="text-xs uppercase tracking-wide opacity-70 mb-1">Name</div>
<input type="text" name="name" class="nb-input w-full" placeholder="Entity title" required>
</label>
<label class="w-full">
<div class="text-xs uppercase tracking-wide opacity-70 mb-1">Type</div>
<select name="entity_type" class="nb-select w-full">
{% for et in entity_types %}
<option value="{{ et }}">{{ et }}</option>
{% endfor %}
</select>
</label>
<label class="w-full">
<div class="text-xs uppercase tracking-wide opacity-70 mb-1">Description</div>
<textarea name="description" class="nb-input w-full h-32"
placeholder="Describe this entity so it can be found later"></textarea>
</label>
</div>
<div class="u-hairline pt-3 mt-4 space-y-3">
<div class="flex flex-col gap-2 sm:flex-row sm:items-end sm:justify-between">
<div>
<div class="text-xs uppercase tracking-wide opacity-70">Relationships</div>
<p class="text-xs opacity-70 max-w-md">
Select existing entities to link. Suggestions will pre-select likely matches.
</p>
</div>
<div class="flex flex-col gap-2 sm:flex-row sm:items-center">
<label class="flex items-center gap-2">
<span class="text-xs uppercase tracking-wide opacity-70">Type</span>
<input type="text" name="relationship_type" value="{{ relationship_list.relationship_type }}"
class="nb-input w-32" placeholder="RelatedTo" list="relationship-type-options">
</label>
<datalist id="relationship-type-options">
{% for rel_type in relationship_type_options %}
<option value="{{ rel_type }}"></option>
{% endfor %}
</datalist>
<button type="button" class="nb-btn btn-sm nb-cta sm:ml-2" hx-post="/knowledge-entity/suggestions"
hx-target="#relationship-list" hx-swap="outerHTML" hx-include="#modal_form">
Suggest Relationships
</button>
</div>
</div>
{% if relationship_list.relationship_options|length == 0 %}
<div id="relationship-list" class="nb-card p-4 text-sm opacity-70">
You need at least one existing entity before creating relationships.
</div>
{% else %}
{% set relationship_options = relationship_list.relationship_options %}
{% set relationship_type = relationship_list.relationship_type %}
{% set suggestion_count = relationship_list.suggestion_count %}
{% include "knowledge/relationship_selector.html" %}
{% endif %}
</div>
{% endblock %}
{% block primary_actions %}
<button type="submit" class="nb-btn nb-cta">
Create Entity
</button>
{% endblock %}

View File

@@ -0,0 +1,39 @@
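{# Shared partial: included by the new-entity modal and swapped in by the
   /knowledge-entity/suggestions endpoint (hx-target="#relationship-list"). #}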
<div id="relationship-list" class="space-y-3">
{% if suggestion_count > 0 %}
<div class="text-xs opacity-70">
Applied {{ suggestion_count }} suggestion{% if suggestion_count != 1 %}s{% endif %}. Toggle any you don't need.
</div>
{% endif %}
{% if relationship_options|length == 0 %}
<div class="nb-card p-4 text-sm opacity-70">
No entities available to relate yet.
</div>
{% else %}
<div class="nb-card max-h-56 overflow-y-auto divide-y">
{% for option in relationship_options %}
<label class="flex items-start gap-3 p-3 hover:bg-base-200 transition-colors cursor-pointer">
<input type="checkbox" name="relationship_ids" value="{{ option.entity.id }}" class="nb-checkbox mt-1" {% if
option.is_selected %}checked{% endif %}>
<div class="flex-1 min-w-0">
<div class="flex flex-wrap items-center gap-2">
<span class="font-medium truncate">{{ option.entity.name }}</span>
<span class="badge badge-xs badge-outline">{{ option.entity.entity_type }}</span>
{% if option.is_suggested %}
<span class="badge badge-xs badge-primary uppercase tracking-wide">Suggested</span>
{% endif %}
</div>
{% if option.entity.description %}
<p class="text-xs opacity-70 mt-1 truncate">{{ option.entity.description }}</p>
{% endif %}
{% if option.is_suggested and option.score is not none %}
<div class="text-[0.65rem] opacity-60 mt-1">
Match score {{ option.score | round(2) }}
</div>
{% endif %}
</div>
</label>
{% endfor %}
</div>
{% endif %}
</div>

View File

@@ -9,7 +9,8 @@
</tr>
</thead>
<tbody>
{% for relationship in relationships %}
{% for row in relationships %}
{% set relationship = row.relationship %}
<tr>
<!-- Origin column -->
<td>
@@ -30,7 +31,7 @@
{{ relationship.out }}
{% endfor %}
</td>
<td class="uppercase tracking-wide text-xs">{{ relationship.metadata.relationship_type }}</td>
<td class="uppercase tracking-wide text-xs">{{ row.relationship_type_label }}</td>
<td>
<button class="nb-btn btn-xs" hx-delete="/knowledge-relationship/{{ relationship.id }}"
hx-target="#relationship_table_section" hx-swap="outerHTML">
@@ -61,7 +62,7 @@
</td>
<td>
<input id="relationship_type_input" name="relationship_type" type="text" placeholder="RelatedTo"
class="nb-input w-full new_relationship_input" />
class="nb-input w-full new_relationship_input" value="{{ default_relationship_type }}" />
</td>
<td>
<button id="save_relationship_button" type="button" class="nb-btn btn-sm" hx-post="/knowledge-relationship"
@@ -80,4 +81,4 @@
document.getElementById('save_relationship_button').click();
}
});
</script>
</script>

View File

@@ -1,5 +1,6 @@
<dialog id="body_modal" class="modal">
<div class="modal-box rounded-none border-2 border-neutral bg-base-100 shadow-[8px_8px_0_0_#000] {% block modal_class %}{% endblock %}">
<div
class="modal-box rounded-none border-2 border-neutral bg-base-100 shadow-[8px_8px_0_0_#000] {% block modal_class %}{% endblock %}">
<form id="modal_form" {% block form_attributes %}{% endblock %}>
<div class="flex flex-col flex-1 gap-4">
{% block modal_content %}{% endblock %}
@@ -21,11 +22,11 @@
document.getElementById('body_modal').showModal();
// Close modal on successful form submission
document.getElementById('modal_form').addEventListener('htmx:afterRequest', (evt) => {
if (evt.detail.successful) {
document.getElementById('body_modal').close();
}
});
document.getElementById('modal_form')
.addEventListener('htmx:afterRequest', (evt) => {
if (evt.detail.elt !== evt.currentTarget) return; // ignore inner htmx requests
if (evt.detail.successful) document.getElementById('body_modal').close();
});
// Clear modal content on close to prevent browser back from reopening it
document.getElementById('body_modal').addEventListener('close', (evt) => {
@@ -35,4 +36,4 @@
<form method="dialog" class="modal-backdrop">
<button>close</button>
</form>
</dialog>
</dialog>

View File

@@ -0,0 +1,113 @@
{% extends 'body_base.html' %}
{% block title %}Minne - Scratchpad{% endblock %}
{% block main %}
<main id="main_section" class="flex justify-center grow mt-2 sm:mt-4 gap-6 mb-10 w-full">
<div class="container">
{% block header %}
<div class="nb-panel p-3 mb-4 flex items-center justify-between">
<h2 class="text-xl font-extrabold tracking-tight">Scratchpads</h2>
<form hx-post="/scratchpad" hx-target="#main_section" hx-swap="outerHTML" class="flex gap-2">
<input type="text" name="title" placeholder="Enter scratchpad title..." class="nb-input nb-input-sm" required>
<button type="submit" class="nb-btn nb-cta">
{% include "icons/scratchpad_icon.html" %} Create
</button>
</form>
</div>
{% endblock %}
{% block content %}
<div class="grid gap-4 md:grid-cols-2 lg:grid-cols-3">
{% for scratchpad in scratchpads %}
<div class="nb-card p-4 hover:nb-shadow-hover transition-all">
<div class="flex justify-between items-start mb-2">
<h3 class="font-semibold text-lg truncate flex-1">{{ scratchpad.title }}</h3>
<div class="flex gap-1 ml-2">
<button hx-get="/scratchpad/{{ scratchpad.id }}/modal" hx-target="#modal" hx-swap="innerHTML"
class="nb-btn nb-btn-sm btn-ghost" title="Edit scratchpad">
{% include "icons/pencil_icon.html" %}
</button>
<form hx-post="/scratchpad/{{ scratchpad.id }}/archive" hx-target="#main_section" hx-swap="outerHTML"
class="inline-flex">
<button type="submit" class="nb-btn nb-btn-sm btn-ghost text-warning" title="Archive scratchpad">
{% include "icons/delete_icon.html" %}
</button>
</form>
</div>
</div>
<div class="text-sm text-base-content/70 mb-2">
{{ scratchpad.content[:100] }}{% if scratchpad.content|length > 100 %}...{% endif %}
</div>
<div class="text-xs text-base-content/50">
Last saved: {{ scratchpad.last_saved_at | datetimeformat(format="short", tz=user.timezone) }}
</div>
</div>
{% else %}
<div class="col-span-full nb-panel p-8 text-center">
<h3 class="text-lg font-semibold mt-2 mb-2">No scratchpads yet</h3>
<p class="text-base-content/70 mb-4">Create your first scratchpad to start jotting down ideas</p>
<form hx-post="/scratchpad" hx-target="#main_section" hx-swap="outerHTML"
class="inline-flex gap-2">
<input type="text" name="title" placeholder="My first scratchpad..." class="nb-input" required>
<button type="submit" class="nb-btn nb-cta">
{% include "icons/scratchpad_icon.html" %} Create Scratchpad
</button>
</form>
</div>
{% endfor %}
</div>
{% endblock %}
{% if archived_scratchpads %}
<div class="mt-6">
<details class="nb-panel p-3 space-y-4">
<summary class="flex items-center justify-between gap-2 text-sm font-semibold cursor-pointer">
<span>Archived Scratchpads</span>
<span class="nb-badge">{{ archived_scratchpads|length }}</span>
</summary>
<div class="text-sm text-base-content/60">Archived scratchpads were ingested into your knowledge base. You can
restore them if you want to keep editing.</div>
<div class="grid gap-3 md:grid-cols-2 lg:grid-cols-3">
{% for scratchpad in archived_scratchpads %}
<div class="nb-card p-3 space-y-3">
<div class="flex items-start justify-between gap-3">
<div class="flex-1 min-w-0">
<h4 class="font-semibold text-base truncate" title="{{ scratchpad.title }}">{{ scratchpad.title }}</h4>
<div class="text-xs text-base-content/50">Archived {{ scratchpad.archived_at | datetimeformat(format="short", tz=user.timezone) }}</div>
{% if scratchpad.ingested_at %}
<div class="text-xs text-base-content/40">Ingestion started {{ scratchpad.ingested_at | datetimeformat(format="short", tz=user.timezone) }}</div>
{% endif %}
</div>
<div class="flex items-center gap-2 flex-shrink-0 flex-wrap justify-end">
<form hx-post="/scratchpad/{{ scratchpad.id }}/restore" hx-target="#main_section" hx-swap="outerHTML"
class="inline-flex">
<button type="submit" class="nb-btn nb-btn-sm">
Restore
</button>
</form>
<form hx-delete="/scratchpad/{{ scratchpad.id }}" hx-target="#main_section" hx-swap="outerHTML"
hx-confirm="Permanently delete this scratchpad?" class="inline-flex">
<button type="submit" class="nb-btn nb-btn-sm btn-ghost text-error" title="Delete permanently">
{% include "icons/delete_icon.html" %}
</button>
</form>
</div>
</div>
</div>
{% endfor %}
</div>
</details>
</div>
{% endif %}
</div>
</main>
{% if new_scratchpad %}
<div hx-swap-oob="innerHTML:#modal">
<div hx-get="/scratchpad/{{ new_scratchpad.id }}/modal" hx-trigger="load" hx-target="#modal" hx-swap="innerHTML"></div>
</div>
{% endif %}
{% endblock %}

View File

@@ -0,0 +1,286 @@
{% extends "modal_base.html" %}
{% block modal_class %}w-11/12 max-w-[90ch] max-h-[95%] overflow-y-auto{% endblock %}
{% block form_attributes %}{% endblock %}
{% block modal_content %}
<h3 class="text-xl font-extrabold tracking-tight">
<div class="flex items-center gap-2" id="title-container">
<span class="font-semibold text-lg flex-1 truncate" id="title-display">{{ scratchpad.title }}</span>
<button type="button" onclick="editTitle()" class="nb-btn nb-btn-sm btn-ghost">
{% include "icons/edit_icon.html" %} Edit title
</button>
</div>
<!-- Hidden title form -->
<form id="title-form" hx-patch="/scratchpad/{{ scratchpad.id }}/title" hx-target="#body_modal" hx-swap="outerHTML"
class="hidden flex items-center gap-2">
<input type="text" name="title" value="{{ scratchpad.title }}"
class="nb-input nb-input-sm font-semibold text-lg flex-1" id="title-input">
<button type="submit" class="nb-btn nb-btn-sm">{% include "icons/check_icon.html" %}</button>
<button type="button" onclick="cancelEditTitle()" class="nb-btn nb-btn-sm btn-ghost">{% include "icons/x_icon.html" %}</button>
</form>
</h3>
<div class="flex flex-col gap-3">
<div class="text-xs text-base-content/50 flex items-center gap-2">
<span>Last saved: <span id="last-saved">{{ scratchpad.last_saved_at | datetimeformat(format="short", tz=user.timezone) }}</span></span>
<span id="save-status"
class="inline-flex items-center gap-1 text-success opacity-0 transition-opacity duration-300 pointer-events-none">
{% include "icons/check_icon.html" %} <span class="uppercase tracking-wider text-[0.7em]">Saved</span>
</span>
</div>
<form id="auto-save-form"
hx-patch="/scratchpad/{{ scratchpad.id }}/auto-save"
hx-trigger="keyup changed delay:2s, focusout"
hx-indicator="#save-indicator"
hx-swap="none"
class="flex flex-col gap-2">
<label class="w-full">
<textarea name="content" id="scratchpad-content"
class="nb-input w-full min-h-[60vh] resize-none font-mono text-sm"
placeholder="Start typing your thoughts... (Tab to indent, Shift+Tab to outdent)"
autofocus>{{ scratchpad.content }}</textarea>
</label>
<div class="flex items-center justify-between">
<div class="flex items-center gap-2">
<div id="save-indicator" class="htmx-indicator text-sm text-base-content/50 hidden">
{% include "icons/refresh_icon.html" %} Saving...
</div>
</div>
<div class="text-sm text-base-content/50">
<span id="char-count">{{ scratchpad.content|length }}</span> characters
</div>
</div>
</form>
<div id="action-row" class="flex gap-2 justify-between items-center">
<form hx-post="/scratchpad/{{ scratchpad.id }}/ingest"
hx-target="#main_section"
hx-swap="outerHTML"
hx-on::after-request="if(event.detail.successful) document.getElementById('body_modal').close()"
class="inline flex flex-col gap-3"
id="ingest-form">
<button type="button" class="nb-btn nb-cta" onclick="toggleIngestConfirmation(true)"
data-role="ingest-trigger">
{% include "icons/send_icon.html" %} Ingest as Content
</button>
<div id="ingest-warning"
class="nb-card bg-warning/10 border border-warning text-warning-content text-sm leading-relaxed flex flex-col gap-2 p-3 hidden">
<div>
<strong class="font-semibold text-warning">Before you ingest</strong>
<p>
This will archive the scratchpad right away. After ingestion finishes you can review the content from the
<a href="/content" class="nb-link">Content</a> page, and archived scratchpads remain available below with a restore option.
</p>
</div>
<div class="flex items-center gap-2">
<button type="submit" class="nb-btn nb-btn-sm nb-cta">
Confirm ingest
</button>
<button type="button" class="nb-btn nb-btn-sm btn-ghost" onclick="toggleIngestConfirmation(false)">
Cancel
</button>
</div>
</div>
</form>
<form id="archive-form" hx-post="/scratchpad/{{ scratchpad.id }}/archive" hx-target="#main_section"
hx-swap="outerHTML" hx-on::after-request="if(event.detail.successful) document.getElementById('body_modal').close()"
class="inline">
<button type="submit" class="nb-btn nb-btn-ghost text-warning">
{% include "icons/delete_icon.html" %} Archive
</button>
</form>
</div>
</div>
<script>
// Title editing functions
function editTitle() {
const titleContainer = document.getElementById('title-container');
const titleForm = document.getElementById('title-form');
const titleInput = document.getElementById('title-input');
if (!titleContainer || !titleForm) return;
titleContainer.classList.add('hidden');
titleForm.classList.remove('hidden');
if (titleInput) {
titleInput.focus();
titleInput.select();
}
}
function cancelEditTitle() {
const titleContainer = document.getElementById('title-container');
const titleForm = document.getElementById('title-form');
if (!titleContainer || !titleForm) return;
titleContainer.classList.remove('hidden');
titleForm.classList.add('hidden');
}
(function initScratchpadModal() {
const modal = document.getElementById('body_modal');
if (!modal) return;
const textarea = modal.querySelector('#scratchpad-content');
const charCount = modal.querySelector('#char-count');
const lastSaved = modal.querySelector('#last-saved');
const saveStatus = modal.querySelector('#save-status');
const autoSaveForm = modal.querySelector('#auto-save-form');
const ingestWarning = modal.querySelector('#ingest-warning');
const ingestForm = modal.querySelector('#ingest-form');
const actionRow = modal.querySelector('#action-row');
let saveStatusTimeout;
const updateCharCount = () => {
if (!textarea || !charCount) return;
charCount.textContent = textarea.value.length;
};
const autoResize = () => {
if (!textarea) return;
textarea.style.height = 'auto';
textarea.style.height = `${textarea.scrollHeight}px`;
};
if (textarea) {
textarea.addEventListener('input', () => {
updateCharCount();
autoResize();
});
// Tab support - insert 4 spaces or handle outdenting
textarea.addEventListener('keydown', (e) => {
if (e.key === 'Tab') {
e.preventDefault();
const start = textarea.selectionStart;
const end = textarea.selectionEnd;
const value = textarea.value;
if (e.shiftKey) {
// Shift+Tab: Outdent - remove up to 4 spaces from start of current line
const lineStart = value.lastIndexOf('\n', start - 1) + 1;
const currentLine = value.substring(lineStart, start);
const leadingSpaces = currentLine.match(/^ */)?.[0]?.length || 0;
const spacesToRemove = Math.min(4, leadingSpaces);
if (spacesToRemove > 0) {
textarea.value = value.substring(0, lineStart) +
currentLine.substring(spacesToRemove) +
value.substring(start);
// Adjust cursor position
textarea.selectionStart = textarea.selectionEnd = start - spacesToRemove;
}
} else {
// Tab: Indent - insert 4 spaces at cursor position
textarea.value = value.substring(0, start) + ' ' + value.substring(end);
// Restore cursor position after inserted spaces
textarea.selectionStart = textarea.selectionEnd = start + 4;
}
// Trigger input event to update character count and auto-resize
textarea.dispatchEvent(new Event('input'));
}
});
updateCharCount();
autoResize();
}
if (autoSaveForm) {
autoSaveForm.addEventListener('htmx:beforeRequest', (evt) => {
if (evt.detail.elt !== autoSaveForm) return;
if (saveStatus) {
saveStatus.classList.add('opacity-0');
saveStatus.classList.remove('opacity-100');
}
});
autoSaveForm.addEventListener('htmx:afterRequest', (evt) => {
if (evt.detail.elt !== autoSaveForm) return;
if (!evt.detail.successful) return;
const xhr = evt.detail.xhr;
if (xhr && xhr.responseText) {
try {
const data = JSON.parse(xhr.responseText);
if (data.last_saved_at_display && lastSaved) {
lastSaved.textContent = data.last_saved_at_display;
}
} catch (_) {
// Ignore JSON parse errors
}
}
if (saveStatus) {
if (saveStatusTimeout) {
clearTimeout(saveStatusTimeout);
}
saveStatus.classList.remove('opacity-0');
saveStatus.classList.add('opacity-100');
saveStatusTimeout = setTimeout(() => {
saveStatus.classList.add('opacity-0');
saveStatus.classList.remove('opacity-100');
}, 2000);
}
});
}
if (ingestForm) {
ingestForm.addEventListener('htmx:afterRequest', (evt) => {
if (evt.detail.elt !== ingestForm) return;
toggleIngestConfirmation(false);
});
}
})();
function toggleIngestConfirmation(show) {
const modal = document.getElementById('body_modal');
if (!modal) return;
const warning = modal.querySelector('#ingest-warning');
const actionRow = modal.querySelector('#action-row');
const ingestForm = modal.querySelector('#ingest-form');
const archiveForm = modal.querySelector('#archive-form');
const ingestButton = modal.querySelector('[data-role="ingest-trigger"]');
const confirmButton = warning ? warning.querySelector('button[type="submit"]') : null;
if (!warning || !ingestButton || !actionRow || !ingestForm) return;
if (show) {
warning.classList.remove('hidden');
ingestButton.classList.add('hidden');
actionRow.classList.add('flex-col', 'items-stretch');
actionRow.classList.remove('items-center', 'justify-between');
ingestForm.classList.add('w-full');
if (archiveForm) {
archiveForm.classList.add('w-full');
}
if (confirmButton) {
confirmButton.focus();
}
} else {
warning.classList.add('hidden');
ingestButton.classList.remove('hidden');
actionRow.classList.remove('flex-col', 'items-stretch');
actionRow.classList.add('items-center', 'justify-between');
ingestForm.classList.remove('w-full');
if (archiveForm) {
archiveForm.classList.remove('w-full');
}
}
}
</script>
{% endblock %}
{% block primary_actions %}
<!-- No additional actions needed -->
{% endblock %}

View File

@@ -2,12 +2,15 @@
<ul class="nb-card p-0">
{% for result in search_result %}
<li class="p-4 u-hairline hover:bg-base-200/40 flex gap-3">
<div class="w-10 h-10 flex-shrink-0 self-start mt-1 grid place-items-center border-2 border-neutral bg-base-100 shadow-[4px_4px_0_0_#000]">
{% if result.url_info and result.url_info.url %}
{% if result.result_type == "text_content" %}
{% set tc = result.text_content %}
<div
class="w-10 h-10 flex-shrink-0 self-start mt-1 grid place-items-center border-2 border-neutral bg-base-100 shadow-[4px_4px_0_0_#000]">
{% if tc.url_info and tc.url_info.url %}
<div class="tooltip tooltip-right" data-tip="Web Link">
{% include "icons/link_icon.html" %}
</div>
{% elif result.file_info and result.file_info.file_name %}
{% elif tc.file_info and tc.file_info.file_name %}
<div class="tooltip tooltip-right" data-tip="File Document">
{% include "icons/document_icon.html" %}
</div>
@@ -20,21 +23,22 @@
<div class="flex-1 min-w-0">
<h3 class="text-lg font-extrabold mb-1 leading-snug">
<a hx-get="/content/{{ result.id }}/read" hx-target="#modal" hx-swap="innerHTML" class="nb-link">
{% set title_text = result.highlighted_url_title
| default(result.url_info.title if result.url_info else none, true)
| default(result.highlighted_file_name, true)
| default(result.file_info.file_name if result.file_info else none, true)
| default("Text snippet: " ~ (result.id | string)[-8:], true) %}
{{ title_text | safe }}
<a hx-get="/content/{{ tc.id }}/read" hx-target="#modal" hx-swap="innerHTML" class="nb-link">
{% set title_text = tc.highlighted_url_title
| default(tc.url_info.title if tc.url_info else none, true)
| default(tc.highlighted_file_name, true)
| default(tc.file_info.file_name if tc.file_info else none, true)
| default("Text snippet: " ~ (tc.id | string)[-8:], true) %}
{{ title_text }}
</a>
</h3>
<div class="markdown-content prose-tufte-compact text-base-content/80 mb-3 overflow-hidden line-clamp-6" data-content="{{result.highlighted_text | escape}}">
{% if result.highlighted_text %}
{{ result.highlighted_text | escape }}
{% elif result.text %}
{{ result.text | escape }}
<div class="markdown-content prose-tufte-compact text-base-content/80 mb-3 overflow-hidden line-clamp-6"
data-content="{{tc.highlighted_text | escape}}">
{% if tc.highlighted_text %}
{{ tc.highlighted_text | escape }}
{% elif tc.text %}
{{ tc.text | escape }}
{% else %}
<span class="italic opacity-60">No text preview available.</span>
{% endif %}
@@ -43,21 +47,21 @@
<div class="text-xs flex flex-wrap gap-x-4 gap-y-2 items-center">
<span class="inline-flex items-center">
<span class="uppercase tracking-wide opacity-60 mr-2">Category</span>
<span class="nb-badge">{{ result.highlighted_category | default(result.category, true) | safe }}</span>
<span class="nb-badge">{{ tc.highlighted_category | default(tc.category, true) | safe }}</span>
</span>
{% if result.highlighted_context or result.context %}
{% if tc.highlighted_context or tc.context %}
<span class="inline-flex items-center min-w-0">
<span class="uppercase tracking-wide opacity-60 mr-2">Context</span>
<span class="nb-badge">{{ result.highlighted_context | default(result.context, true) | safe }}</span>
<span class="nb-badge">{{ tc.highlighted_context | default(tc.context, true) | safe }}</span>
</span>
{% endif %}
{% if result.url_info and result.url_info.url %}
{% if tc.url_info and tc.url_info.url %}
<span class="inline-flex items-center min-w-0">
<span class="uppercase tracking-wide opacity-60 mr-2">Source</span>
<a href="{{ result.url_info.url }}" target="_blank" class="nb-link truncate" title="{{ result.url_info.url }}">
{{ result.highlighted_url | default(result.url_info.url ) | safe }}
<a href="{{ tc.url_info.url }}" target="_blank" class="nb-link truncate" title="{{ tc.url_info.url }}">
{{ tc.highlighted_url | default(tc.url_info.url ) | safe }}
</a>
</span>
{% endif %}
@@ -68,18 +72,65 @@
</span>
</div>
</div>
{% elif result.result_type == "knowledge_entity" %}
{% set entity = result.knowledge_entity %}
<div
class="w-10 h-10 flex-shrink-0 self-start mt-1 grid place-items-center border-2 border-neutral bg-base-100 shadow-[4px_4px_0_0_#000]">
<div class="tooltip tooltip-right" data-tip="Knowledge Entity">
{% include "icons/book_icon.html" %}
</div>
</div>
<div class="flex-1 min-w-0">
<h3 class="text-lg font-extrabold mb-1 leading-snug">
<a hx-get="/knowledge-entity/{{ entity.id }}" hx-target="#modal" hx-swap="innerHTML" class="nb-link">
{% set entity_title = entity.highlighted_name | default(entity.name, true) %}
{{ entity_title }}
</a>
</h3>
<div class="prose prose-tufte-compact text-base-content/80 mb-3 overflow-hidden line-clamp-6">
{% if entity.highlighted_description %}
{{ entity.highlighted_description }}
{% elif entity.description %}
{{ entity.description | escape }}
{% else %}
<span class="italic opacity-60">No description available.</span>
{% endif %}
</div>
<div class="text-xs flex flex-wrap gap-x-4 gap-y-2 items-center">
<span class="inline-flex items-center">
<span class="uppercase tracking-wide opacity-60 mr-2">Entity Type</span>
<span class="nb-badge">{{ entity.entity_type }}</span>
</span>
{% if entity.source_id %}
<span class="inline-flex items-center min-w-0">
<span class="uppercase tracking-wide opacity-60 mr-2">Source ID</span>
<span class="nb-badge truncate max-w-xs" title="{{ entity.source_id }}">{{ entity.source_id }}</span>
</span>
{% endif %}
<span class="inline-flex items-center">
<span class="uppercase tracking-wide opacity-60 mr-2">Score</span>
<span class="nb-badge">{{ result.score }}</span>
</span>
</div>
</div>
{% endif %}
</li>
{% endfor %}
</ul>
</ul>
{% elif query_param is defined and query_param | trim != "" %}
<div class="nb-panel p-5 text-center">
<p class="text-xl font-extrabold mb-2">No results for “{{ query_param | escape }}”.</p>
<p class="text-sm opacity-70">Try different keywords or check for typos.</p>
</div>
</div>
{% else %}
<div class="nb-panel p-5 text-center">
<p class="text-lg font-semibold">Enter a term above to search your knowledge base.</p>
<p class="text-sm opacity-70">Results will appear here.</p>
</div>
{% endif %}
</div>
{% endif %}
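The result_type switch above (text_content vs knowledge_entity) suggests the search handler returns a tagged result type. A rough sketch under that assumption; the enum, the hit types and the serde layout are illustrative, not taken from the handler code:
// Hypothetical tagged search result assumed by this template (TextContentHit and EntityHit are placeholders).
#[derive(serde::Serialize)]
#[serde(tag = "result_type", rename_all = "snake_case")]
enum SearchResult {
    TextContent { text_content: TextContentHit, score: f32 },
    KnowledgeEntity { knowledge_entity: EntityHit, score: f32 },
}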

View File

@@ -9,6 +9,8 @@
{% include "icons/chat_icon.html" %}
{% elif name == "search" %}
{% include "icons/search_icon.html" %}
{% elif name == "scratchpad" %}
{% include "icons/scratchpad_icon.html" %}
{% endif %}
{% endmacro %}
@@ -26,7 +28,8 @@
("/knowledge", "book", "Knowledge"),
("/content", "document", "Content"),
("/chat", "chat", "Chat"),
("/search", "search", "Search")
("/search", "search", "Search"),
("/scratchpad", "scratchpad", "Scratchpad")
] %}
<li>
<a hx-boost="true" href="{{ url }}" class="nb-btn w-full justify-start gap-3 bg-base-100 hover:bg-base-200">

View File

@@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2021"
license = "AGPL-3.0-or-later"
[lints]
workspace = true
[dependencies]
tokio = { workspace = true }
serde = { workspace = true }
@@ -15,7 +18,8 @@ async-openai = { workspace = true }
surrealdb = { workspace = true }
dom_smoothie = { workspace = true }
tempfile = { workspace = true }
axum_typed_multipart = { workspace = true}
axum_typed_multipart = { workspace = true}
anyhow = { workspace = true }
reqwest = { workspace = true }
chrono = { workspace = true }
text-splitter = { workspace = true }
@@ -25,8 +29,11 @@ headless_chrome = { workspace = true }
base64 = { workspace = true }
pdf-extract = "0.9"
lopdf = "0.32"
bytes = { workspace = true }
common = { path = "../common" }
composite-retrieval = { path = "../composite-retrieval" }
async-trait = { workspace = true }
state-machines = { workspace = true }
[features]
docker = []

View File

@@ -1,141 +0,0 @@
use std::sync::Arc;
use async_openai::types::{
ChatCompletionRequestSystemMessage, ChatCompletionRequestUserMessage,
CreateChatCompletionRequest, CreateChatCompletionRequestArgs, ResponseFormat,
ResponseFormatJsonSchema,
};
use common::{
error::AppError,
storage::{
db::SurrealDbClient,
types::{knowledge_entity::KnowledgeEntity, system_settings::SystemSettings},
},
};
use composite_retrieval::retrieve_entities;
use serde_json::json;
use tracing::{debug, info};
use crate::{
types::llm_enrichment_result::LLMEnrichmentResult,
utils::llm_instructions::{get_ingress_analysis_schema, INGRESS_ANALYSIS_SYSTEM_MESSAGE},
};
pub struct IngestionEnricher {
db_client: Arc<SurrealDbClient>,
openai_client: Arc<async_openai::Client<async_openai::config::OpenAIConfig>>,
}
impl IngestionEnricher {
pub fn new(
db_client: Arc<SurrealDbClient>,
openai_client: Arc<async_openai::Client<async_openai::config::OpenAIConfig>>,
) -> Self {
Self {
db_client,
openai_client,
}
}
pub async fn analyze_content(
&self,
category: &str,
context: Option<&str>,
text: &str,
user_id: &str,
) -> Result<LLMEnrichmentResult, AppError> {
info!("getting similar entitities");
let similar_entities = self
.find_similar_entities(category, context, text, user_id)
.await?;
info!("got similar entitities");
let llm_request = self
.prepare_llm_request(category, context, text, &similar_entities)
.await?;
self.perform_analysis(llm_request).await
}
async fn find_similar_entities(
&self,
category: &str,
context: Option<&str>,
text: &str,
user_id: &str,
) -> Result<Vec<KnowledgeEntity>, AppError> {
let input_text = format!(
"content: {}, category: {}, user_context: {:?}",
text, category, context
);
retrieve_entities(&self.db_client, &self.openai_client, &input_text, user_id).await
}
async fn prepare_llm_request(
&self,
category: &str,
context: Option<&str>,
text: &str,
similar_entities: &[KnowledgeEntity],
) -> Result<CreateChatCompletionRequest, AppError> {
let settings = SystemSettings::get_current(&self.db_client).await?;
let entities_json = json!(similar_entities
.iter()
.map(|entity| {
json!({
"KnowledgeEntity": {
"id": entity.id,
"name": entity.name,
"description": entity.description
}
})
})
.collect::<Vec<_>>());
let user_message = format!(
"Category:\n{}\ncontext:\n{:?}\nContent:\n{}\nExisting KnowledgeEntities in database:\n{}",
category, context, text, entities_json
);
debug!("Prepared LLM request message: {}", user_message);
let response_format = ResponseFormat::JsonSchema {
json_schema: ResponseFormatJsonSchema {
description: Some("Structured analysis of the submitted content".into()),
name: "content_analysis".into(),
schema: Some(get_ingress_analysis_schema()),
strict: Some(true),
},
};
let request = CreateChatCompletionRequestArgs::default()
.model(&settings.processing_model)
.messages([
ChatCompletionRequestSystemMessage::from(INGRESS_ANALYSIS_SYSTEM_MESSAGE).into(),
ChatCompletionRequestUserMessage::from(user_message).into(),
])
.response_format(response_format)
.build()?;
Ok(request)
}
async fn perform_analysis(
&self,
request: CreateChatCompletionRequest,
) -> Result<LLMEnrichmentResult, AppError> {
let response = self.openai_client.chat().create(request).await?;
let content = response
.choices
.first()
.and_then(|choice| choice.message.content.as_ref())
.ok_or(AppError::LLMParsing(
"No content found in LLM response".into(),
))?;
serde_json::from_str::<LLMEnrichmentResult>(content).map_err(|e| {
AppError::LLMParsing(format!("Failed to parse LLM response into analysis: {}", e))
})
}
}

View File

@@ -1,103 +1,47 @@
pub mod enricher;
pub mod pipeline;
pub mod types;
pub mod utils;
use chrono::Utc;
use common::storage::{
db::SurrealDbClient,
types::ingestion_task::{IngestionTask, IngestionTaskStatus},
types::ingestion_task::{IngestionTask, DEFAULT_LEASE_SECS},
};
use futures::StreamExt;
use pipeline::IngestionPipeline;
use std::sync::Arc;
use surrealdb::Action;
use tracing::{error, info};
use tokio::time::{sleep, Duration};
use tracing::{error, info, warn};
use uuid::Uuid;
pub async fn run_worker_loop(
db: Arc<SurrealDbClient>,
ingestion_pipeline: Arc<IngestionPipeline>,
) -> Result<(), Box<dyn std::error::Error>> {
let worker_id = format!("ingestion-worker-{}", Uuid::new_v4());
let lease_duration = Duration::from_secs(DEFAULT_LEASE_SECS as u64);
let idle_backoff = Duration::from_millis(500);
loop {
// First, check for any unfinished tasks
let unfinished_tasks = IngestionTask::get_unfinished_tasks(&db).await?;
if !unfinished_tasks.is_empty() {
info!("Found {} unfinished jobs", unfinished_tasks.len());
for task in unfinished_tasks {
ingestion_pipeline.process_task(task).await?;
}
}
// If no unfinished jobs, start listening for new ones
info!("Listening for new jobs...");
let mut job_stream = IngestionTask::listen_for_tasks(&db).await?;
while let Some(notification) = job_stream.next().await {
match notification {
Ok(notification) => {
info!("Received notification: {:?}", notification);
match notification.action {
Action::Create => {
if let Err(e) = ingestion_pipeline.process_task(notification.data).await
{
error!("Error processing task: {}", e);
}
}
Action::Update => {
match notification.data.status {
IngestionTaskStatus::Completed
| IngestionTaskStatus::Error { .. }
| IngestionTaskStatus::Cancelled => {
info!(
"Skipping already completed/error/cancelled task: {}",
notification.data.id
);
continue;
}
IngestionTaskStatus::InProgress { attempts, .. } => {
// Only process if this is a retry after an error, not our own update
if let Ok(Some(current_task)) =
db.get_item::<IngestionTask>(&notification.data.id).await
{
match current_task.status {
IngestionTaskStatus::Error { .. }
if attempts
< common::storage::types::ingestion_task::MAX_ATTEMPTS =>
{
// This is a retry after an error
if let Err(e) =
ingestion_pipeline.process_task(current_task).await
{
error!("Error processing task retry: {}", e);
}
}
_ => {
info!(
"Skipping in-progress update for task: {}",
notification.data.id
);
continue;
}
}
}
}
IngestionTaskStatus::Created => {
// Shouldn't happen with Update action, but process if it does
if let Err(e) =
ingestion_pipeline.process_task(notification.data).await
{
error!("Error processing task: {}", e);
}
}
}
}
_ => {} // Ignore other actions
}
match IngestionTask::claim_next_ready(&db, &worker_id, Utc::now(), lease_duration).await {
Ok(Some(task)) => {
let task_id = task.id.clone();
info!(
%worker_id,
%task_id,
attempt = task.attempts,
"claimed ingestion task"
);
if let Err(err) = ingestion_pipeline.process_task(task).await {
error!(%worker_id, %task_id, error = %err, "ingestion task failed");
}
Err(e) => error!("Error in job notification: {}", e),
}
Ok(None) => {
sleep(idle_backoff).await;
}
Err(err) => {
error!(%worker_id, error = %err, "failed to claim ingestion task");
warn!("Backing off for 1s after claim error");
sleep(Duration::from_secs(1)).await;
}
}
// If we reach here, the stream has ended (connection lost?)
error!("Database stream ended unexpectedly, reconnecting...");
tokio::time::sleep(tokio::time::Duration::from_secs(5)).await;
}
}
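Each loop instance generates its own worker_id and claims tasks via claim_next_ready, so several workers can run side by side without coordinating. A minimal sketch of how a caller might start a few of them concurrently; the worker count of 4 is an assumed value, not an actual config option:
// Sketch: drive several claim-based ingestion workers from one task (count is illustrative).
let workers = (0..4).map(|_| run_worker_loop(Arc::clone(&db), Arc::clone(&ingestion_pipeline)));
futures::future::join_all(workers).await; // each loop runs until the process shuts down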

View File

@@ -1,238 +0,0 @@
use std::{sync::Arc, time::Instant};
use chrono::Utc;
use text_splitter::TextSplitter;
use tokio::time::{sleep, Duration};
use tracing::{info, warn};
use common::{
error::AppError,
storage::{
db::SurrealDbClient,
types::{
ingestion_task::{IngestionTask, IngestionTaskStatus, MAX_ATTEMPTS},
knowledge_entity::KnowledgeEntity,
knowledge_relationship::KnowledgeRelationship,
text_chunk::TextChunk,
text_content::TextContent,
},
},
utils::{config::AppConfig, embedding::generate_embedding},
};
use crate::{
enricher::IngestionEnricher,
types::{llm_enrichment_result::LLMEnrichmentResult, to_text_content},
};
pub struct IngestionPipeline {
db: Arc<SurrealDbClient>,
openai_client: Arc<async_openai::Client<async_openai::config::OpenAIConfig>>,
config: AppConfig,
}
impl IngestionPipeline {
pub async fn new(
db: Arc<SurrealDbClient>,
openai_client: Arc<async_openai::Client<async_openai::config::OpenAIConfig>>,
config: AppConfig,
) -> Result<Self, AppError> {
Ok(Self {
db,
openai_client,
config,
})
}
pub async fn process_task(&self, task: IngestionTask) -> Result<(), AppError> {
let current_attempts = match task.status {
IngestionTaskStatus::InProgress { attempts, .. } => attempts + 1,
_ => 1,
};
// Update status to InProgress with attempt count
IngestionTask::update_status(
&task.id,
IngestionTaskStatus::InProgress {
attempts: current_attempts,
last_attempt: Utc::now(),
},
&self.db,
)
.await?;
let text_content =
to_text_content(task.content, &self.db, &self.config, &self.openai_client).await?;
match self.process(&text_content).await {
Ok(_) => {
IngestionTask::update_status(&task.id, IngestionTaskStatus::Completed, &self.db)
.await?;
Ok(())
}
Err(e) => {
if current_attempts >= MAX_ATTEMPTS {
IngestionTask::update_status(
&task.id,
IngestionTaskStatus::Error {
message: format!("Max attempts reached: {}", e),
},
&self.db,
)
.await?;
}
Err(AppError::Processing(e.to_string()))
}
}
}
pub async fn process(&self, content: &TextContent) -> Result<(), AppError> {
let now = Instant::now();
// Perform analysis; this step also includes retrieval
let analysis = self.perform_semantic_analysis(content).await?;
let end = now.elapsed();
info!(
"{:?} time elapsed during creation of entities and relationships",
end
);
// Convert analysis to application objects
let (entities, relationships) = analysis
.to_database_entities(&content.id, &content.user_id, &self.openai_client, &self.db)
.await?;
// Store everything
tokio::try_join!(
self.store_graph_entities(entities, relationships),
self.store_vector_chunks(content),
)?;
// Store original content
self.db.store_item(content.to_owned()).await?;
self.db.rebuild_indexes().await?;
Ok(())
}
async fn perform_semantic_analysis(
&self,
content: &TextContent,
) -> Result<LLMEnrichmentResult, AppError> {
let analyser = IngestionEnricher::new(self.db.clone(), self.openai_client.clone());
analyser
.analyze_content(
&content.category,
content.context.as_deref(),
&content.text,
&content.user_id,
)
.await
}
async fn store_graph_entities(
&self,
entities: Vec<KnowledgeEntity>,
relationships: Vec<KnowledgeRelationship>,
) -> Result<(), AppError> {
let entities = Arc::new(entities);
let relationships = Arc::new(relationships);
let entity_count = entities.len();
let relationship_count = relationships.len();
const STORE_GRAPH_MUTATION: &str = r#"
BEGIN TRANSACTION;
LET $entities = $entities;
LET $relationships = $relationships;
FOR $entity IN $entities {
CREATE type::thing('knowledge_entity', $entity.id) CONTENT $entity;
};
FOR $relationship IN $relationships {
LET $in_node = type::thing('knowledge_entity', $relationship.in);
LET $out_node = type::thing('knowledge_entity', $relationship.out);
RELATE $in_node->relates_to->$out_node CONTENT {
id: type::thing('relates_to', $relationship.id),
metadata: $relationship.metadata
};
};
COMMIT TRANSACTION;
"#;
const MAX_ATTEMPTS: usize = 3;
const INITIAL_BACKOFF_MS: u64 = 50;
const MAX_BACKOFF_MS: u64 = 800;
let mut backoff_ms = INITIAL_BACKOFF_MS;
let mut success = false;
for attempt in 0..MAX_ATTEMPTS {
let result = self
.db
.client
.query(STORE_GRAPH_MUTATION)
.bind(("entities", entities.clone()))
.bind(("relationships", relationships.clone()))
.await;
match result {
Ok(_) => {
success = true;
break;
}
Err(err) => {
if Self::is_retryable_conflict(&err) && attempt + 1 < MAX_ATTEMPTS {
warn!(
attempt = attempt + 1,
"Transient SurrealDB conflict while storing graph data; retrying"
);
sleep(Duration::from_millis(backoff_ms)).await;
backoff_ms = (backoff_ms * 2).min(MAX_BACKOFF_MS);
continue;
}
return Err(AppError::from(err));
}
}
}
if !success {
return Err(AppError::InternalError(
"Failed to store graph entities after retries".to_string(),
));
}
info!(
"Stored {} entities and {} relationships",
entity_count, relationship_count
);
Ok(())
}
async fn store_vector_chunks(&self, content: &TextContent) -> Result<(), AppError> {
let splitter = TextSplitter::new(500..2000);
let chunks = splitter.chunks(&content.text);
// Could potentially process chunks in parallel with a bounded concurrency limit
for chunk in chunks {
let embedding = generate_embedding(&self.openai_client, chunk, &self.db).await?;
let text_chunk = TextChunk::new(
content.id.to_string(),
chunk.to_string(),
embedding,
content.user_id.to_string(),
);
self.db.store_item(text_chunk).await?;
}
Ok(())
}
fn is_retryable_conflict(error: &surrealdb::Error) -> bool {
error
.to_string()
.contains("Failed to commit transaction due to a read or write conflict")
}
}

View File

@@ -0,0 +1,35 @@
#[derive(Debug, Clone)]
pub struct IngestionTuning {
pub retry_base_delay_secs: u64,
pub retry_max_delay_secs: u64,
pub retry_backoff_cap_exponent: u32,
pub graph_store_attempts: usize,
pub graph_initial_backoff_ms: u64,
pub graph_max_backoff_ms: u64,
pub chunk_min_chars: usize,
pub chunk_max_chars: usize,
pub chunk_insert_concurrency: usize,
pub entity_embedding_concurrency: usize,
}
impl Default for IngestionTuning {
fn default() -> Self {
Self {
retry_base_delay_secs: 30,
retry_max_delay_secs: 15 * 60,
retry_backoff_cap_exponent: 5,
graph_store_attempts: 3,
graph_initial_backoff_ms: 50,
graph_max_backoff_ms: 800,
chunk_min_chars: 500,
chunk_max_chars: 2_000,
chunk_insert_concurrency: 8,
entity_embedding_concurrency: 4,
}
}
}
#[derive(Debug, Clone, Default)]
pub struct IngestionConfig {
pub tuning: IngestionTuning,
}
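IngestionTuning ships a Default impl, so individual knobs can be overridden with struct-update syntax. A small sketch, assuming the defaults above stay in place for everything not listed:
// Sketch: lower the concurrency knobs while keeping the default retry/backoff behaviour.
let config = IngestionConfig {
    tuning: IngestionTuning {
        chunk_insert_concurrency: 4,
        entity_embedding_concurrency: 2,
        ..IngestionTuning::default()
    },
};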

View File

@@ -0,0 +1,76 @@
use common::{
error::AppError,
storage::{
db::SurrealDbClient,
types::{ingestion_task::IngestionTask, text_content::TextContent},
},
};
use composite_retrieval::RetrievedEntity;
use tracing::error;
use super::enrichment_result::LLMEnrichmentResult;
use super::{config::IngestionConfig, services::PipelineServices};
pub struct PipelineContext<'a> {
pub task: &'a IngestionTask,
pub task_id: String,
pub attempt: u32,
pub db: &'a SurrealDbClient,
pub pipeline_config: &'a IngestionConfig,
pub services: &'a dyn PipelineServices,
pub text_content: Option<TextContent>,
pub similar_entities: Vec<RetrievedEntity>,
pub analysis: Option<LLMEnrichmentResult>,
}
impl<'a> PipelineContext<'a> {
pub fn new(
task: &'a IngestionTask,
db: &'a SurrealDbClient,
pipeline_config: &'a IngestionConfig,
services: &'a dyn PipelineServices,
) -> Self {
let task_id = task.id.clone();
let attempt = task.attempts;
Self {
task,
task_id,
attempt,
db,
pipeline_config,
services,
text_content: None,
similar_entities: Vec::new(),
analysis: None,
}
}
pub fn text_content(&self) -> Result<&TextContent, AppError> {
self.text_content
.as_ref()
.ok_or_else(|| AppError::InternalError("text content expected to be available".into()))
}
pub fn take_text_content(&mut self) -> Result<TextContent, AppError> {
self.text_content.take().ok_or_else(|| {
AppError::InternalError("text content expected to be available for persistence".into())
})
}
pub fn take_analysis(&mut self) -> Result<LLMEnrichmentResult, AppError> {
self.analysis.take().ok_or_else(|| {
AppError::InternalError("analysis expected to be available for persistence".into())
})
}
pub fn abort(&mut self, err: AppError) -> AppError {
error!(
task_id = %self.task_id,
attempt = self.attempt,
error = %err,
"ingestion pipeline aborted"
);
err
}
}

View File

@@ -1,8 +1,8 @@
use std::sync::{Arc, Mutex};
use std::sync::Arc;
use chrono::Utc;
use futures::stream::{self, StreamExt, TryStreamExt};
use serde::{Deserialize, Serialize};
use tokio::task;
use common::{
error::AppError,
@@ -15,28 +15,25 @@ use common::{
},
utils::embedding::generate_embedding,
};
use futures::future::try_join_all;
use crate::utils::GraphMapper;
use crate::utils::graph_mapper::GraphMapper;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct LLMKnowledgeEntity {
pub key: String, // Temporary identifier
pub key: String,
pub name: String,
pub description: String,
pub entity_type: String, // Should match KnowledgeEntityType variants
pub entity_type: String,
}
/// Represents a single relationship from the LLM.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct LLMRelationship {
#[serde(rename = "type")]
pub type_: String, // e.g., RelatedTo, RelevantTo
pub source: String, // Key of the source entity
pub target: String, // Key of the target entity
pub type_: String,
pub source: String,
pub target: String,
}
/// Represents the entire graph analysis result from the LLM.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct LLMEnrichmentResult {
pub knowledge_entities: Vec<LLMKnowledgeEntity>,
@@ -44,27 +41,16 @@ pub struct LLMEnrichmentResult {
}
impl LLMEnrichmentResult {
/// Converts the LLM graph analysis result into database entities and relationships.
///
/// # Arguments
///
/// * `source_id` - A UUID representing the source identifier.
/// * `openai_client` - OpenAI client for LLM calls.
///
/// # Returns
///
/// * `Result<(Vec<KnowledgeEntity>, Vec<KnowledgeRelationship>), AppError>` - A tuple containing vectors of `KnowledgeEntity` and `KnowledgeRelationship`.
pub async fn to_database_entities(
&self,
source_id: &str,
user_id: &str,
openai_client: &async_openai::Client<async_openai::config::OpenAIConfig>,
db_client: &SurrealDbClient,
entity_concurrency: usize,
) -> Result<(Vec<KnowledgeEntity>, Vec<KnowledgeRelationship>), AppError> {
// Create mapper and pre-assign IDs
let mapper = Arc::new(Mutex::new(self.create_mapper()?));
let mapper = Arc::new(self.create_mapper()?);
// Process entities
let entities = self
.process_entities(
source_id,
@@ -72,10 +58,10 @@ impl LLMEnrichmentResult {
Arc::clone(&mapper),
openai_client,
db_client,
entity_concurrency,
)
.await?;
// Process relationships
let relationships = self.process_relationships(source_id, user_id, Arc::clone(&mapper))?;
Ok((entities, relationships))
@@ -84,7 +70,6 @@ impl LLMEnrichmentResult {
fn create_mapper(&self) -> Result<GraphMapper, AppError> {
let mut mapper = GraphMapper::new();
// Pre-assign all IDs
for entity in &self.knowledge_entities {
mapper.assign_id(&entity.key);
}
@@ -96,57 +81,46 @@ impl LLMEnrichmentResult {
&self,
source_id: &str,
user_id: &str,
mapper: Arc<Mutex<GraphMapper>>,
mapper: Arc<GraphMapper>,
openai_client: &async_openai::Client<async_openai::config::OpenAIConfig>,
db_client: &SurrealDbClient,
entity_concurrency: usize,
) -> Result<Vec<KnowledgeEntity>, AppError> {
let futures: Vec<_> = self
.knowledge_entities
.iter()
.map(|entity| {
let mapper = Arc::clone(&mapper);
let openai_client = openai_client.clone();
let source_id = source_id.to_string();
let user_id = user_id.to_string();
let entity = entity.clone();
let db_client = db_client.clone();
stream::iter(self.knowledge_entities.iter().cloned().map(|entity| {
let mapper = Arc::clone(&mapper);
let openai_client = openai_client.clone();
let source_id = source_id.to_string();
let user_id = user_id.to_string();
let db_client = db_client.clone();
task::spawn(async move {
create_single_entity(
&entity,
&source_id,
&user_id,
mapper,
&openai_client,
&db_client.clone(),
)
.await
})
})
.collect();
let results = try_join_all(futures)
.await?
.into_iter()
.collect::<Result<Vec<_>, _>>()?;
Ok(results)
async move {
create_single_entity(
&entity,
&source_id,
&user_id,
mapper,
&openai_client,
&db_client,
)
.await
}
}))
.buffer_unordered(entity_concurrency.max(1))
.try_collect()
.await
}
fn process_relationships(
&self,
source_id: &str,
user_id: &str,
mapper: Arc<Mutex<GraphMapper>>,
mapper: Arc<GraphMapper>,
) -> Result<Vec<KnowledgeRelationship>, AppError> {
let mapper_guard = mapper
.lock()
.map_err(|_| AppError::GraphMapper("Failed to lock mapper".into()))?;
self.relationships
.iter()
.map(|rel| {
let source_db_id = mapper_guard.get_or_parse_id(&rel.source)?;
let target_db_id = mapper_guard.get_or_parse_id(&rel.target)?;
let source_db_id = mapper.get_or_parse_id(&rel.source)?;
let target_db_id = mapper.get_or_parse_id(&rel.target)?;
Ok(KnowledgeRelationship::new(
source_db_id.to_string(),
@@ -159,20 +133,16 @@ impl LLMEnrichmentResult {
.collect()
}
}
async fn create_single_entity(
llm_entity: &LLMKnowledgeEntity,
source_id: &str,
user_id: &str,
mapper: Arc<Mutex<GraphMapper>>,
mapper: Arc<GraphMapper>,
openai_client: &async_openai::Client<async_openai::config::OpenAIConfig>,
db_client: &SurrealDbClient,
) -> Result<KnowledgeEntity, AppError> {
let assigned_id = {
let mapper = mapper
.lock()
.map_err(|_| AppError::GraphMapper("Failed to lock mapper".into()))?;
mapper.get_id(&llm_entity.key)?.to_string()
};
let assigned_id = mapper.get_id(&llm_entity.key)?.to_string();
let embedding_input = format!(
"name: {}, description: {}, type: {}",

View File

@@ -0,0 +1,230 @@
mod config;
mod context;
mod enrichment_result;
mod preparation;
mod services;
mod stages;
mod state;
pub use config::{IngestionConfig, IngestionTuning};
pub use services::{DefaultPipelineServices, PipelineServices};
use std::{
sync::Arc,
time::{Duration, Instant},
};
use async_openai::Client;
use common::{
error::AppError,
storage::{
db::SurrealDbClient,
store::StorageManager,
types::{
ingestion_payload::IngestionPayload,
ingestion_task::{IngestionTask, TaskErrorInfo},
},
},
utils::config::AppConfig,
};
use composite_retrieval::reranking::RerankerPool;
use tracing::{debug, info, warn};
use self::{
context::PipelineContext,
stages::{enrich, persist, prepare_content, retrieve_related},
state::ready,
};
pub struct IngestionPipeline {
db: Arc<SurrealDbClient>,
pipeline_config: IngestionConfig,
services: Arc<dyn PipelineServices>,
}
impl IngestionPipeline {
pub async fn new(
db: Arc<SurrealDbClient>,
openai_client: Arc<Client<async_openai::config::OpenAIConfig>>,
config: AppConfig,
reranker_pool: Option<Arc<RerankerPool>>,
storage: StorageManager,
) -> Result<Self, AppError> {
let services = DefaultPipelineServices::new(
db.clone(),
openai_client.clone(),
config.clone(),
reranker_pool,
storage,
);
Self::with_services(db, IngestionConfig::default(), Arc::new(services))
}
pub fn with_services(
db: Arc<SurrealDbClient>,
pipeline_config: IngestionConfig,
services: Arc<dyn PipelineServices>,
) -> Result<Self, AppError> {
Ok(Self {
db,
pipeline_config,
services,
})
}
#[tracing::instrument(
skip_all,
fields(
task_id = %task.id,
attempt = task.attempts,
worker_id = task.worker_id.as_deref().unwrap_or("unknown-worker"),
user_id = %task.user_id
)
)]
pub async fn process_task(&self, task: IngestionTask) -> Result<(), AppError> {
let mut processing_task = task.mark_processing(&self.db).await?;
let payload = std::mem::replace(
&mut processing_task.content,
IngestionPayload::Text {
text: String::new(),
context: String::new(),
category: String::new(),
user_id: processing_task.user_id.clone(),
},
);
match self
.drive_pipeline(&processing_task, payload)
.await
.map_err(|err| {
debug!(
task_id = %processing_task.id,
attempt = processing_task.attempts,
error = %err,
"ingestion pipeline failed"
);
err
}) {
Ok(()) => {
processing_task.mark_succeeded(&self.db).await?;
tracing::info!(
task_id = %processing_task.id,
attempt = processing_task.attempts,
"ingestion task succeeded"
);
Ok(())
}
Err(err) => {
let reason = err.to_string();
let retryable = !matches!(err, AppError::Validation(_));
let error_info = TaskErrorInfo {
code: None,
message: reason.clone(),
};
if retryable && processing_task.can_retry() {
let delay = self.retry_delay(processing_task.attempts);
processing_task
.mark_failed(error_info, delay, &self.db)
.await?;
warn!(
task_id = %processing_task.id,
attempt = processing_task.attempts,
retry_in_secs = delay.as_secs(),
"ingestion task failed; scheduled retry"
);
} else {
let failed_task = processing_task
.mark_failed(error_info.clone(), Duration::from_secs(0), &self.db)
.await?;
failed_task.mark_dead_letter(error_info, &self.db).await?;
warn!(
task_id = %failed_task.id,
attempt = failed_task.attempts,
"ingestion task failed; moved to dead letter queue"
);
}
Err(AppError::Processing(reason))
}
}
}
fn retry_delay(&self, attempt: u32) -> Duration {
let tuning = &self.pipeline_config.tuning;
let capped_attempt = attempt
.saturating_sub(1)
.min(tuning.retry_backoff_cap_exponent);
let multiplier = 2_u64.pow(capped_attempt);
let delay = tuning.retry_base_delay_secs * multiplier;
Duration::from_secs(delay.min(tuning.retry_max_delay_secs))
}
#[tracing::instrument(
skip_all,
fields(task_id = %task.id, attempt = task.attempts, user_id = %task.user_id)
)]
async fn drive_pipeline(
&self,
task: &IngestionTask,
payload: IngestionPayload,
) -> Result<(), AppError> {
let mut ctx = PipelineContext::new(
task,
self.db.as_ref(),
&self.pipeline_config,
self.services.as_ref(),
);
let machine = ready();
let pipeline_started = Instant::now();
let stage_start = Instant::now();
let machine = prepare_content(machine, &mut ctx, payload)
.await
.map_err(|err| ctx.abort(err))?;
let prepare_duration = stage_start.elapsed();
let stage_start = Instant::now();
let machine = retrieve_related(machine, &mut ctx)
.await
.map_err(|err| ctx.abort(err))?;
let retrieve_duration = stage_start.elapsed();
let stage_start = Instant::now();
let machine = enrich(machine, &mut ctx)
.await
.map_err(|err| ctx.abort(err))?;
let enrich_duration = stage_start.elapsed();
let stage_start = Instant::now();
let _machine = persist(machine, &mut ctx)
.await
.map_err(|err| ctx.abort(err))?;
let persist_duration = stage_start.elapsed();
let total_duration = pipeline_started.elapsed();
let prepare_ms = prepare_duration.as_millis() as u64;
let retrieve_ms = retrieve_duration.as_millis() as u64;
let enrich_ms = enrich_duration.as_millis() as u64;
let persist_ms = persist_duration.as_millis() as u64;
info!(
task_id = %ctx.task_id,
attempt = ctx.attempt,
total_ms = total_duration.as_millis() as u64,
prepare_ms,
retrieve_ms,
enrich_ms,
persist_ms,
"ingestion pipeline finished"
);
Ok(())
}
}
#[cfg(test)]
mod tests;
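With the default tuning (30s base, cap exponent 5, 15 minute max) the retry_delay schedule doubles per attempt and then saturates. A small sketch of the same arithmetic, assuming those defaults:
// Mirrors retry_delay with the IngestionTuning defaults (base 30s, exponent cap 5, max 900s).
fn sketch_retry_delay_secs(attempt: u32) -> u64 {
    let capped = attempt.saturating_sub(1).min(5);
    (30 * 2_u64.pow(capped)).min(15 * 60)
}
// attempts 1..=7 -> 30, 60, 120, 240, 480, 900, 900 seconds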

View File

@@ -0,0 +1,77 @@
use common::{
error::AppError,
storage::{
db::SurrealDbClient,
store::StorageManager,
types::{
ingestion_payload::IngestionPayload,
text_content::{TextContent, UrlInfo},
},
},
utils::config::AppConfig,
};
use crate::utils::{
file_text_extraction::extract_text_from_file, url_text_retrieval::extract_text_from_url,
};
pub(crate) async fn to_text_content(
ingestion_payload: IngestionPayload,
db: &SurrealDbClient,
config: &AppConfig,
openai_client: &async_openai::Client<async_openai::config::OpenAIConfig>,
storage: &StorageManager,
) -> Result<TextContent, AppError> {
match ingestion_payload {
IngestionPayload::Url {
url,
context,
category,
user_id,
} => {
let (article, file_info) = extract_text_from_url(&url, db, &user_id, storage).await?;
Ok(TextContent::new(
article.text_content.into(),
Some(context),
category,
None,
Some(UrlInfo {
url,
title: article.title,
image_id: file_info.id,
}),
user_id,
))
}
IngestionPayload::Text {
text,
context,
category,
user_id,
} => Ok(TextContent::new(
text,
Some(context),
category,
None,
None,
user_id,
)),
IngestionPayload::File {
file_info,
context,
category,
user_id,
} => {
let text =
extract_text_from_file(&file_info, db, openai_client, config, storage).await?;
Ok(TextContent::new(
text,
Some(context),
category,
Some(file_info),
None,
user_id,
))
}
}
}

View File

@@ -0,0 +1,241 @@
use std::{ops::Range, sync::Arc};
use async_openai::types::{
ChatCompletionRequestSystemMessage, ChatCompletionRequestUserMessage,
CreateChatCompletionRequest, CreateChatCompletionRequestArgs, ResponseFormat,
ResponseFormatJsonSchema,
};
use async_trait::async_trait;
use common::{
error::AppError,
storage::{
db::SurrealDbClient,
store::StorageManager,
types::{
ingestion_payload::IngestionPayload, knowledge_entity::KnowledgeEntity,
knowledge_relationship::KnowledgeRelationship, system_settings::SystemSettings,
text_chunk::TextChunk, text_content::TextContent,
},
},
utils::{config::AppConfig, embedding::generate_embedding},
};
use composite_retrieval::{
reranking::RerankerPool, retrieve_entities, retrieved_entities_to_json, RetrievedEntity,
};
use text_splitter::TextSplitter;
use super::{enrichment_result::LLMEnrichmentResult, preparation::to_text_content};
use crate::utils::llm_instructions::{
get_ingress_analysis_schema, INGRESS_ANALYSIS_SYSTEM_MESSAGE,
};
#[async_trait]
pub trait PipelineServices: Send + Sync {
async fn prepare_text_content(
&self,
payload: IngestionPayload,
) -> Result<TextContent, AppError>;
async fn retrieve_similar_entities(
&self,
content: &TextContent,
) -> Result<Vec<RetrievedEntity>, AppError>;
async fn run_enrichment(
&self,
content: &TextContent,
similar_entities: &[RetrievedEntity],
) -> Result<LLMEnrichmentResult, AppError>;
async fn convert_analysis(
&self,
content: &TextContent,
analysis: &LLMEnrichmentResult,
entity_concurrency: usize,
) -> Result<(Vec<KnowledgeEntity>, Vec<KnowledgeRelationship>), AppError>;
async fn prepare_chunks(
&self,
content: &TextContent,
range: Range<usize>,
) -> Result<Vec<TextChunk>, AppError>;
}
pub struct DefaultPipelineServices {
db: Arc<SurrealDbClient>,
openai_client: Arc<async_openai::Client<async_openai::config::OpenAIConfig>>,
config: AppConfig,
reranker_pool: Option<Arc<RerankerPool>>,
storage: StorageManager,
}
impl DefaultPipelineServices {
pub fn new(
db: Arc<SurrealDbClient>,
openai_client: Arc<async_openai::Client<async_openai::config::OpenAIConfig>>,
config: AppConfig,
reranker_pool: Option<Arc<RerankerPool>>,
storage: StorageManager,
) -> Self {
Self {
db,
openai_client,
config,
reranker_pool,
storage,
}
}
async fn prepare_llm_request(
&self,
category: &str,
context: Option<&str>,
text: &str,
similar_entities: &[RetrievedEntity],
) -> Result<CreateChatCompletionRequest, AppError> {
let settings = SystemSettings::get_current(&self.db).await?;
let entities_json = retrieved_entities_to_json(similar_entities);
let user_message = format!(
"Category:\n{category}\ncontext:\n{context:?}\nContent:\n{text}\nExisting KnowledgeEntities in database:\n{entities_json}"
);
let response_format = ResponseFormat::JsonSchema {
json_schema: ResponseFormatJsonSchema {
description: Some("Structured analysis of the submitted content".into()),
name: "content_analysis".into(),
schema: Some(get_ingress_analysis_schema()),
strict: Some(true),
},
};
let request = CreateChatCompletionRequestArgs::default()
.model(&settings.processing_model)
.messages([
ChatCompletionRequestSystemMessage::from(INGRESS_ANALYSIS_SYSTEM_MESSAGE).into(),
ChatCompletionRequestUserMessage::from(user_message).into(),
])
.response_format(response_format)
.build()?;
Ok(request)
}
async fn perform_analysis(
&self,
request: CreateChatCompletionRequest,
) -> Result<LLMEnrichmentResult, AppError> {
let response = self.openai_client.chat().create(request).await?;
let content = response
.choices
.first()
.and_then(|choice| choice.message.content.as_ref())
.ok_or(AppError::LLMParsing(
"No content found in LLM response".into(),
))?;
serde_json::from_str::<LLMEnrichmentResult>(content).map_err(|e| {
AppError::LLMParsing(format!("Failed to parse LLM response into analysis: {e}"))
})
}
}
#[async_trait]
impl PipelineServices for DefaultPipelineServices {
async fn prepare_text_content(
&self,
payload: IngestionPayload,
) -> Result<TextContent, AppError> {
to_text_content(
payload,
&self.db,
&self.config,
&self.openai_client,
&self.storage,
)
.await
}
async fn retrieve_similar_entities(
&self,
content: &TextContent,
) -> Result<Vec<RetrievedEntity>, AppError> {
let input_text = format!(
"content: {}, category: {}, user_context: {:?}",
content.text, content.category, content.context
);
let rerank_lease = match &self.reranker_pool {
Some(pool) => Some(pool.checkout().await),
None => None,
};
retrieve_entities(
&self.db,
&self.openai_client,
&input_text,
&content.user_id,
rerank_lease,
)
.await
}
async fn run_enrichment(
&self,
content: &TextContent,
similar_entities: &[RetrievedEntity],
) -> Result<LLMEnrichmentResult, AppError> {
let request = self
.prepare_llm_request(
&content.category,
content.context.as_deref(),
&content.text,
similar_entities,
)
.await?;
self.perform_analysis(request).await
}
async fn convert_analysis(
&self,
content: &TextContent,
analysis: &LLMEnrichmentResult,
entity_concurrency: usize,
) -> Result<(Vec<KnowledgeEntity>, Vec<KnowledgeRelationship>), AppError> {
analysis
.to_database_entities(
&content.id,
&content.user_id,
&self.openai_client,
&self.db,
entity_concurrency,
)
.await
}
async fn prepare_chunks(
&self,
content: &TextContent,
range: Range<usize>,
) -> Result<Vec<TextChunk>, AppError> {
let splitter = TextSplitter::new(range.clone());
let chunk_texts: Vec<String> = splitter
.chunks(&content.text)
.map(|chunk| chunk.to_string())
.collect();
let mut chunks = Vec::with_capacity(chunk_texts.len());
for chunk in chunk_texts {
let embedding = generate_embedding(&self.openai_client, &chunk, &self.db).await?;
chunks.push(TextChunk::new(
content.id.clone(),
chunk,
embedding,
content.user_id.clone(),
));
}
Ok(chunks)
}
}
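Because the pipeline only talks to PipelineServices, tests can swap in a stub instead of real LLM, reranker and storage calls. A rough sketch of such a stub, assuming the imports already present in this module and that TextContent::new takes the same six arguments used in preparation.rs; the canned values are placeholders:
// Hypothetical stub for tests; every method returns a fixed, empty result.
struct StubServices;
#[async_trait]
impl PipelineServices for StubServices {
    async fn prepare_text_content(&self, _payload: IngestionPayload) -> Result<TextContent, AppError> {
        Ok(TextContent::new("stub text".into(), Some("ctx".into()), "note".into(), None, None, "user".into()))
    }
    async fn retrieve_similar_entities(&self, _content: &TextContent) -> Result<Vec<RetrievedEntity>, AppError> {
        Ok(Vec::new())
    }
    async fn run_enrichment(&self, _content: &TextContent, _similar: &[RetrievedEntity]) -> Result<LLMEnrichmentResult, AppError> {
        Ok(LLMEnrichmentResult { knowledge_entities: Vec::new(), relationships: Vec::new() })
    }
    async fn convert_analysis(&self, _content: &TextContent, _analysis: &LLMEnrichmentResult, _concurrency: usize)
        -> Result<(Vec<KnowledgeEntity>, Vec<KnowledgeRelationship>), AppError> {
        Ok((Vec::new(), Vec::new()))
    }
    async fn prepare_chunks(&self, _content: &TextContent, _range: Range<usize>) -> Result<Vec<TextChunk>, AppError> {
        Ok(Vec::new())
    }
}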

View File

@@ -0,0 +1,338 @@
use std::sync::Arc;
use common::{
error::AppError,
storage::{
db::SurrealDbClient,
types::{
ingestion_payload::IngestionPayload, knowledge_entity::KnowledgeEntity,
knowledge_relationship::KnowledgeRelationship, text_chunk::TextChunk,
text_content::TextContent,
},
},
};
use state_machines::core::GuardError;
use tokio::time::{sleep, Duration};
use tracing::{debug, instrument, warn};
use super::{
context::PipelineContext,
services::PipelineServices,
state::{ContentPrepared, Enriched, IngestionMachine, Persisted, Ready, Retrieved},
};
#[instrument(
level = "trace",
skip_all,
fields(task_id = %ctx.task_id, attempt = ctx.attempt, user_id = %ctx.task.user_id)
)]
pub async fn prepare_content(
machine: IngestionMachine<(), Ready>,
ctx: &mut PipelineContext<'_>,
payload: IngestionPayload,
) -> Result<IngestionMachine<(), ContentPrepared>, AppError> {
let text_content = ctx.services.prepare_text_content(payload).await?;
let text_len = text_content.text.chars().count();
let preview: String = text_content.text.chars().take(120).collect();
let preview_clean = preview.replace('\n', " ");
let preview_len = preview_clean.chars().count();
let truncated = text_len > preview_len;
let context_len = text_content
.context
.as_ref()
.map(|c| c.chars().count())
.unwrap_or(0);
tracing::info!(
task_id = %ctx.task_id,
attempt = ctx.attempt,
user_id = %text_content.user_id,
category = %text_content.category,
text_chars = text_len,
context_chars = context_len,
attachments = text_content.file_info.is_some(),
"ingestion task input ready"
);
debug!(
task_id = %ctx.task_id,
attempt = ctx.attempt,
preview = %preview_clean,
preview_truncated = truncated,
"ingestion task input preview"
);
ctx.text_content = Some(text_content);
machine
.prepare()
.map_err(|(_, guard)| map_guard_error("prepare", guard))
}
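// Looks up entities similar to the prepared content and records them on the
// context before advancing to the `Retrieved` state.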
#[instrument(
level = "trace",
skip_all,
fields(task_id = %ctx.task_id, attempt = ctx.attempt, user_id = %ctx.task.user_id)
)]
pub async fn retrieve_related(
machine: IngestionMachine<(), ContentPrepared>,
ctx: &mut PipelineContext<'_>,
) -> Result<IngestionMachine<(), Retrieved>, AppError> {
let content = ctx.text_content()?;
let similar = ctx.services.retrieve_similar_entities(content).await?;
debug!(
task_id = %ctx.task_id,
attempt = ctx.attempt,
similar_count = similar.len(),
"ingestion retrieved similar entities"
);
ctx.similar_entities = similar;
machine
.retrieve()
.map_err(|(_, guard)| map_guard_error("retrieve", guard))
}
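// Runs LLM enrichment over the prepared content and the previously retrieved
// similar entities, storing the resulting analysis on the context.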
#[instrument(
level = "trace",
skip_all,
fields(task_id = %ctx.task_id, attempt = ctx.attempt, user_id = %ctx.task.user_id)
)]
pub async fn enrich(
machine: IngestionMachine<(), Retrieved>,
ctx: &mut PipelineContext<'_>,
) -> Result<IngestionMachine<(), Enriched>, AppError> {
let content = ctx.text_content()?;
let analysis = ctx
.services
.run_enrichment(content, &ctx.similar_entities)
.await?;
debug!(
task_id = %ctx.task_id,
attempt = ctx.attempt,
entity_suggestions = analysis.knowledge_entities.len(),
relationship_suggestions = analysis.relationships.len(),
"ingestion enrichment completed"
);
ctx.analysis = Some(analysis);
machine
.enrich()
.map_err(|(_, guard)| map_guard_error("enrich", guard))
}
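// Final stage: converts the analysis into entities and relationships, writes
// graph data and vector chunks concurrently, stores the source text content,
// and rebuilds the search indexes.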
#[instrument(
level = "trace",
skip_all,
fields(task_id = %ctx.task_id, attempt = ctx.attempt, user_id = %ctx.task.user_id)
)]
pub async fn persist(
machine: IngestionMachine<(), Enriched>,
ctx: &mut PipelineContext<'_>,
) -> Result<IngestionMachine<(), Persisted>, AppError> {
let content = ctx.take_text_content()?;
let analysis = ctx.take_analysis()?;
let (entities, relationships) = ctx
.services
.convert_analysis(
&content,
&analysis,
ctx.pipeline_config.tuning.entity_embedding_concurrency,
)
.await?;
let entity_count = entities.len();
let relationship_count = relationships.len();
let chunk_range =
ctx.pipeline_config.tuning.chunk_min_chars..ctx.pipeline_config.tuning.chunk_max_chars;
let ((), chunk_count) = tokio::try_join!(
store_graph_entities(ctx.db, &ctx.pipeline_config.tuning, entities, relationships),
store_vector_chunks(
ctx.db,
ctx.services,
ctx.task_id.as_str(),
&content,
chunk_range,
&ctx.pipeline_config.tuning
)
)?;
ctx.db.store_item(content).await?;
ctx.db.rebuild_indexes().await?;
debug!(
task_id = %ctx.task_id,
attempt = ctx.attempt,
entity_count,
relationship_count,
chunk_count,
"ingestion persistence flushed to database"
);
machine
.persist()
.map_err(|(_, guard)| map_guard_error("persist", guard))
}
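// Guard failures indicate the machine was asked for a transition from an
// unexpected state; they are surfaced as internal errors.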
fn map_guard_error(event: &str, guard: GuardError) -> AppError {
AppError::InternalError(format!(
"invalid ingestion pipeline transition during {event}: {guard:?}"
))
}
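// Persists entities and relationships in a single SurrealDB transaction,
// retrying with exponential backoff (capped at `graph_max_backoff_ms`) when the
// commit fails with a read/write conflict.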
async fn store_graph_entities(
db: &SurrealDbClient,
tuning: &super::config::IngestionTuning,
entities: Vec<KnowledgeEntity>,
relationships: Vec<KnowledgeRelationship>,
) -> Result<(), AppError> {
const STORE_GRAPH_MUTATION: &str = r"
BEGIN TRANSACTION;
LET $entities = $entities;
LET $relationships = $relationships;
FOR $entity IN $entities {
CREATE type::thing('knowledge_entity', $entity.id) CONTENT $entity;
};
FOR $relationship IN $relationships {
LET $in_node = type::thing('knowledge_entity', $relationship.in);
LET $out_node = type::thing('knowledge_entity', $relationship.out);
RELATE $in_node->relates_to->$out_node CONTENT {
id: type::thing('relates_to', $relationship.id),
metadata: $relationship.metadata
};
};
COMMIT TRANSACTION;
";
let entities = Arc::new(entities);
let relationships = Arc::new(relationships);
let mut backoff_ms = tuning.graph_initial_backoff_ms;
for attempt in 0..tuning.graph_store_attempts {
let result = db
.client
.query(STORE_GRAPH_MUTATION)
.bind(("entities", entities.clone()))
.bind(("relationships", relationships.clone()))
.await;
match result {
Ok(_) => return Ok(()),
Err(err) => {
if is_retryable_conflict(&err) && attempt + 1 < tuning.graph_store_attempts {
warn!(
attempt = attempt + 1,
"Transient SurrealDB conflict while storing graph data; retrying"
);
sleep(Duration::from_millis(backoff_ms)).await;
backoff_ms = (backoff_ms * 2).min(tuning.graph_max_backoff_ms);
continue;
}
return Err(AppError::from(err));
}
}
}
Err(AppError::InternalError(
"Failed to store graph entities after retries".to_string(),
))
}
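// Prepares embedded chunks via the pipeline services, logs each prepared
// chunk, and inserts them in batches sized by `chunk_insert_concurrency`.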
async fn store_vector_chunks(
db: &SurrealDbClient,
services: &dyn PipelineServices,
task_id: &str,
content: &TextContent,
chunk_range: std::ops::Range<usize>,
tuning: &super::config::IngestionTuning,
) -> Result<usize, AppError> {
let prepared_chunks = services.prepare_chunks(content, chunk_range).await?;
let chunk_count = prepared_chunks.len();
let batch_size = tuning.chunk_insert_concurrency.max(1);
for chunk in &prepared_chunks {
debug!(
task_id = %task_id,
chunk_id = %chunk.id,
chunk_len = chunk.chunk.chars().count(),
"chunk persisted"
);
}
for batch in prepared_chunks.chunks(batch_size) {
store_chunk_batch(db, batch, tuning).await?;
}
Ok(chunk_count)
}
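// Detects transient SurrealDB read/write conflicts by matching on the error
// message text.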
fn is_retryable_conflict(error: &surrealdb::Error) -> bool {
error
.to_string()
.contains("Failed to commit transaction due to a read or write conflict")
}
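// Inserts one batch of text chunks inside a SurrealDB transaction, reusing the
// same conflict-retry and backoff settings as the graph mutation.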
async fn store_chunk_batch(
db: &SurrealDbClient,
batch: &[TextChunk],
tuning: &super::config::IngestionTuning,
) -> Result<(), AppError> {
if batch.is_empty() {
return Ok(());
}
const STORE_CHUNKS_MUTATION: &str = r"
BEGIN TRANSACTION;
LET $chunks = $chunks;
FOR $chunk IN $chunks {
CREATE type::thing('text_chunk', $chunk.id) CONTENT $chunk;
};
COMMIT TRANSACTION;
";
let chunks = Arc::new(batch.to_vec());
let mut backoff_ms = tuning.graph_initial_backoff_ms;
for attempt in 0..tuning.graph_store_attempts {
let result = db
.client
.query(STORE_CHUNKS_MUTATION)
.bind(("chunks", chunks.clone()))
.await;
match result {
Ok(_) => return Ok(()),
Err(err) => {
if is_retryable_conflict(&err) && attempt + 1 < tuning.graph_store_attempts {
warn!(
attempt = attempt + 1,
"Transient SurrealDB conflict while storing chunks; retrying"
);
sleep(Duration::from_millis(backoff_ms)).await;
backoff_ms = (backoff_ms * 2).min(tuning.graph_max_backoff_ms);
continue;
}
return Err(AppError::from(err));
}
}
}
Err(AppError::InternalError(
"Failed to store text chunks after retries".to_string(),
))
}

Some files were not shown because too many files have changed in this diff.