feat: avante.nvim - update providers

This commit is contained in:
Ryan Yin
2025-06-07 11:56:30 +08:00
parent 285662df1e
commit 10924e67e1
3 changed files with 121 additions and 28 deletions

View File

@@ -19,8 +19,8 @@ return {
},
version = false, -- Never set this value to "*"! Never!
opts = {
provider = "deepseek_reasoner",
cursor_applying_provider = "deepseek_reasoner", -- Provider used for applying edits in cursor planning mode; you can use any provider you want.
provider = "openrouter_claude_4",
cursor_applying_provider = "openrouter_claude_4",
behaviour = {
-- auto_suggestions = true,
enable_cursor_planning_mode = true, -- enable cursor planning mode!
@@ -28,44 +28,104 @@ return {
-- WARNING: Since auto-suggestions are a high-frequency operation and therefore expensive,
-- currently designating it as `copilot` provider is dangerous because: https://github.com/yetone/avante.nvim/issues/1048
-- Of course, you can reduce the request frequency by increasing `suggestion.debounce`.
auto_suggestions_provider = "aliyun_qwen3",
auto_suggestions_provider = "ollama",
suggestion = {
debounce = 750, -- wait for x ms before suggestion
throttle = 1200, -- wait for at least x ms before the next suggestion
},
ollama = {
endpoint = "http://192.168.5.100:11434", -- Note that there is no /v1 at the end.
model = "modelscope.cn/unsloth/Qwen3-30B-A3B-GGUF",
-- model = "modelscope.cn/unsloth/Qwen3-235B-A22B-GGUF",
web_search_engine = {
provider = "google", -- tavily, serpapi, searchapi, google, kagi, brave, or searxng
proxy = nil, -- proxy support, e.g., http://127.0.0.1:7890
},
vendors = {
deepseek_coder = {
__inherited_from = "openai",
api_key_name = "DEEPSEEK_API_KEY",
endpoint = "https://api.deepseek.com",
model = "deepseek-coder",
providers = {
ollama = {
endpoint = "http://192.168.5.100:11434", -- Note that there is no /v1 at the end.
model = "modelscope.cn/unsloth/Qwen3-30B-A3B-GGUF",
-- model = "modelscope.cn/unsloth/Qwen3-32B-GGUF",
},
-- deepseek chat v3
deepseek_chat = {
__inherited_from = "openai",
api_key_name = "DEEPSEEK_API_KEY",
endpoint = "https://api.deepseek.com",
model = "deepseek-chat",
-- ==============================================
-- https://aistudio.google.com/prompts/new_chat
-- ==============================================
gemini = {
api_key_name = "GEMINI_API_KEY",
model = "gemini-2.5-pro-preview-06-05",
timeout = 30000, -- Timeout in milliseconds, increase this for reasoning models
temperature = 0,
max_completion_tokens = 8192, -- Increase this to include reasoning tokens (for reasoning models)
--reasoning_effort = "medium", -- low|medium|high, only used for reasoning models
},
-- deepseek r1
deepseek_reasoner = {
-- ==============================================
-- https://openrouter.ai/rankings
-- ==============================================
openrouter_claude_4 = {
__inherited_from = "openai",
api_key_name = "DEEPSEEK_API_KEY",
endpoint = "https://api.deepseek.com",
model = "deepseek-reasoner",
endpoint = "https://openrouter.ai/api/v1",
api_key_name = "OPENROUTER_API_KEY",
model = "anthropic/claude-sonnet-4",
},
-- ==============================================
-- https://bailian.console.aliyun.com/?tab=model
-- ==============================================
aliyun_qwen3 = {
__inherited_from = "openai",
api_key_name = "DASHSCOPE_API_KEY",
endpoint = "https://dashscope.aliyuncs.com/compatible-mode/v1",
-- model = "qwen-coder-plus-latest",
model = "qwen3-235b-a22b",
-- disable_tools = true,
},
aliyun_dpr1 = {
__inherited_from = "openai",
api_key_name = "DASHSCOPE_API_KEY",
endpoint = "https://dashscope.aliyuncs.com/compatible-mode/v1",
model = "deepseek-r1-0528",
disable_tools = true,
},
-- ==============================================
-- https://console.volcengine.com/ark/region:ark+cn-beijing/model?feature=&vendor=DeepSeek&view=VENDOR_VIEW
-- ==============================================
ark_dpr1 = {
__inherited_from = "openai",
api_key_name = "ARK_API_KEY",
endpoint = "https://ark.cn-beijing.volces.com/api/v3",
model = "deepseek-r1-250528",
-- disable_tools = true,
},
-- ==============================================
-- https://cloud.siliconflow.cn/models
-- ==============================================
sflow_dpr1 = {
__inherited_from = "openai",
api_key_name = "SILICONFLOW_API_KEY",
endpoint = "https://api.siliconflow.cn/v1",
model = "Pro/deepseek-ai/DeepSeek-R1",
-- disable_tools = true,
},
-- ==============================================
-- https://platform.deepseek.com/usage
-- ==============================================
dp_coder = {
__inherited_from = "openai",
api_key_name = "DEEPSEEK_API_KEY",
endpoint = "https://api.deepseek.com",
model = "deepseek-coder",
},
-- deepseek chat v3
dp_chat = {
__inherited_from = "openai",
api_key_name = "DEEPSEEK_API_KEY",
endpoint = "https://api.deepseek.com",
model = "deepseek-chat",
-- disable_tools = true,
},
-- deepseek r1
dp_r1 = {
__inherited_from = "openai",
api_key_name = "DEEPSEEK_API_KEY",
endpoint = "https://api.deepseek.com",
model = "deepseek-reasoner",
-- disable_tools = true,
},
},
},

View File

@@ -1,6 +1,39 @@
-- File explorer(Custom configs)
return {
"nvim-neo-tree/neo-tree.nvim",
-- Add shortcuts for avante.nvim
-- Register a custom neo-tree command that adds the file under the cursor to
-- the avante.nvim sidebar's file selector, and map it to "oa" in the
-- filesystem window.
config = function()
  require("neo-tree").setup {
    filesystem = {
      commands = {
        -- state: the neo-tree state object; state.tree:get_node() returns the
        -- node currently under the cursor, whose id is its absolute path.
        avante_add_files = function(state)
          local node = state.tree:get_node()
          local filepath = node:get_id()
          -- avante's file selector takes project-relative paths.
          local relative_path = require("avante.utils").relative_path(filepath)
          local sidebar = require("avante").get()
          local open = sidebar:is_open()
          -- ensure avante sidebar is open
          if not open then
            require("avante.api").ask()
            -- re-fetch after ask(): opening may create a new sidebar instance
            sidebar = require("avante").get()
          end
          sidebar.file_selector:add_selected_file(relative_path)
          -- remove neo tree buffer
          -- NOTE(review): when ask() opens the sidebar it appears to pick up
          -- the focused neo-tree buffer as a selected "file"; this drops it.
          -- The literal name "neo-tree filesystem [1]" is assumed to match the
          -- default neo-tree buffer name — confirm if your setup renames it.
          if not open then sidebar.file_selector:remove_selected_file "neo-tree filesystem [1]" end
        end,
      },
      window = {
        mappings = {
          ["oa"] = "avante_add_files",
        },
      },
    },
  }
end,
opts = {
filesystem = {
filtered_items = {