feat(idols-ai): add ollama

Author: Ryan Yin
Date:   2025-04-30 14:03:21 +08:00
Parent: 25e1604cda
Commit: 8c40436e41
5 changed files with 48 additions and 0 deletions

flake.lock (generated, 17 additions)

@@ -1130,6 +1130,22 @@
         "url": "https://github.com/NixOS/nixpkgs/archive/356624c12086a18f2ea2825fed34523d60ccc4e3.tar.gz"
       }
     },
+    "nixpkgs-ollama": {
+      "locked": {
+        "lastModified": 1745930157,
+        "narHash": "sha256-y3h3NLnzRSiUkYpnfvnS669zWZLoqqI6NprtLQ+5dck=",
+        "owner": "nixos",
+        "repo": "nixpkgs",
+        "rev": "46e634be05ce9dc6d4db8e664515ba10b78151ae",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nixos",
+        "ref": "nixos-unstable",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
     "nixpkgs-regression": {
       "locked": {
         "lastModified": 1643052045,
@@ -1639,6 +1655,7 @@
         "nixpak": "nixpak",
         "nixpkgs": "nixpkgs_3",
         "nixpkgs-darwin": "nixpkgs-darwin",
+        "nixpkgs-ollama": "nixpkgs-ollama",
         "nixpkgs-stable": "nixpkgs-stable_3",
         "nixpkgs-unstable": "nixpkgs-unstable_2",
         "nuenv": "nuenv",

flake.nix

@@ -39,6 +39,8 @@
     nixpkgs-unstable.url = "github:nixos/nixpkgs/nixos-unstable";
     nixpkgs-stable.url = "github:nixos/nixpkgs/nixos-24.11";
+    nixpkgs-ollama.url = "github:nixos/nixpkgs/nixos-unstable";
     # for macos
     # nixpkgs-darwin.url = "github:nixos/nixpkgs/nixpkgs-24.11-darwin";
     nixpkgs-darwin.url = "github:nixos/nixpkgs/nixpkgs-unstable";
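
The new nixpkgs-ollama input pins a separate nixpkgs revision just for Ollama, so it can be bumped or held back independently of the main nixpkgs. For the Ollama module later in this commit to receive nixpkgs-ollama as a function argument, the flake's outputs have to forward the input to the host's modules, typically via specialArgs. That wiring is not part of this diff, so the following is only a minimal sketch; the host path and the system value are assumptions:

{
  inputs = {
    nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
    nixpkgs-ollama.url = "github:nixos/nixpkgs/nixos-unstable";
  };

  outputs = {nixpkgs, nixpkgs-ollama, ...}: {
    nixosConfigurations.idols-ai = nixpkgs.lib.nixosSystem {
      system = "x86_64-linux"; # assumption; any CUDA-capable Linux system
      # Forward the extra input so modules can take nixpkgs-ollama as an argument.
      specialArgs = {inherit nixpkgs-ollama;};
      modules = [
        ./hosts/idols-ai # hypothetical path, for illustration only
      ];
    };
  };
}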

avante.nvim plugin config (Lua)

@@ -21,6 +21,11 @@ return {
     opts = {
       -- add any opts here
       provider = "deepseek_coder",
+      ollama = {
+        endpoint = "http://127.0.0.1:11434", -- Note that there is no /v1 at the end.
+        model = "modelscope.cn/unsloth/Qwen3-30B-A3B-GGUF",
+        -- model = "modelscope.cn/unsloth/Qwen3-235B-A22B-GGUF",
+      },
       vendors = {
         openrouter_claude_4_7_sonnet = {
           __inherited_from = "openai",

new file: module entry point for the new directory

@@ -0,0 +1,3 @@
+{mylib, ...}: {
+  imports = mylib.scanPaths ./.;
+}
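
mylib.scanPaths itself is not part of this commit. As used here, it returns every sibling .nix file and subdirectory of this entry file, so a newly added module (such as the Ollama module below) is imported without being listed explicitly. A minimal sketch of such a helper, assuming it follows the common readDir/filter pattern:

{lib}: {
  # Hypothetical re-implementation of the helper, for illustration only.
  scanPaths = path:
    builtins.map (name: path + "/${name}") (
      builtins.attrNames (
        lib.filterAttrs (
          name: type:
            # keep subdirectories (expected to hold their own default.nix) ...
            type == "directory"
            # ... and every .nix file except the entry file itself
            || (name != "default.nix" && lib.hasSuffix ".nix" name)
        ) (builtins.readDir path)
      )
    );
}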

new file: Ollama NixOS module

@@ -0,0 +1,21 @@
+{
+  pkgs,
+  nixpkgs-ollama,
+  ...
+}: let
+  pkgs-ollama = import nixpkgs-ollama {
+    inherit (pkgs) system;
+    # To use cuda, we need to allow the installation of non-free software
+    config.allowUnfree = true;
+  };
+in {
+  services.ollama = rec {
+    enable = true;
+    package = pkgs-ollama.ollama;
+    acceleration = "cuda";
+    host = "127.0.0.1";
+    port = 11434;
+    home = "/var/lib/ollama";
+    models = "${home}/models";
+  };
+}
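
The module imports the pinned nixpkgs-ollama with allowUnfree enabled (needed for the CUDA libraries) and binds the service to loopback on the default port 11434, which is exactly the endpoint hard-coded in the avante.nvim config above; rec lets models reuse home, keeping downloaded models under /var/lib/ollama/models. Because the Lua endpoint is a plain string, a small host-level assertion can catch the two drifting apart. This is a hedged sketch, not part of the commit:

{config, ...}: {
  assertions = [
    {
      # services.ollama.host and .port are the options set in the module above.
      assertion =
        config.services.ollama.host == "127.0.0.1"
        && config.services.ollama.port == 11434;
      message = "avante.nvim expects Ollama at http://127.0.0.1:11434; update its endpoint if services.ollama moves.";
    }
  ];
}

Note that the service only serves models that are already present: the Qwen3 model named in the avante config still has to be fetched once into the models directory, for example with "ollama pull modelscope.cn/unsloth/Qwen3-30B-A3B-GGUF", assuming the ModelScope registry path works with the ollama CLI exactly as written in the Lua config.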