# config.example.toml — example configuration for OllamaTerm.
# Copy this file to your configuration location and adjust values as needed.
[app]
title = "OllamaTerm"
class = "ollamaterm"
connection_check_interval_seconds = 15

[ollama]
host = "http://localhost:11434"
model = "llama3.2"
models = ["llama3.2", "qwen2.5", "mistral"]
# Request timeout for Ollama API calls — presumably seconds; confirm against the client code.
timeout = 120
system_prompt = "You are a helpful assistant."
max_history_messages = 200
max_context_tokens = 4096
pull_model_on_start = true
# Ollama API Bearer token for server authentication.
# Obtain from: https://ollama.com/settings/keys
# Used when connecting to ollama.com's hosted inference API, or when your
# local Ollama server is configured to require authentication.
# api_key = ""

[ui]
font_size = 14
background_color = "#1a1b26"
user_message_color = "#7aa2f7"
assistant_message_color = "#9ece6a"
border_color = "#565f89"
show_timestamps = true
stream_chunk_size = 8

[theme]
# Theme selection: "textual-dark", "textual-light", "nord", "gruvbox", "tokyo-night",
# "monokai", "dracula", "solarized-light", "solarized-dark", "atom-one-dark", "atom-one-light"
# or "custom" to use the ui colors above
name = "textual-dark"
# Persist theme choice across sessions
persist = true

# Custom theme definitions (optional)
[theme.custom]
# Define custom themes here - example for a "catppuccin" theme:
# [theme.custom.catppuccin]
# primary = "#89B4FA"
# secondary = "#74C7EC"
# accent = "#F5C2E7"
# foreground = "#CDD6F4"
# background = "#1E1E2E"
# surface = "#313244"
# panel = "#45475A"
# success = "#A6E3A1"
# warning = "#F9E2AF"
# error = "#F38BA8"
# dark = true

[keybinds]
send_message = "ctrl+enter"
new_conversation = "ctrl+n"
quit = "ctrl+q"
scroll_up = "ctrl+k"
scroll_down = "ctrl+j"
command_palette = "ctrl+p"
toggle_model_picker = "ctrl+m"
toggle_theme_picker = "ctrl+t"
save_conversation = "ctrl+s"
load_conversation = "ctrl+l"
export_conversation = "ctrl+e"
search_messages = "ctrl+f"
copy_last_message = "ctrl+y"

[security]
allow_remote_hosts = false
allowed_hosts = ["localhost", "127.0.0.1", "::1"]

[logging]
level = "INFO"
structured = true
log_to_file = false
# NOTE: TOML performs no "~" or variable expansion; the application is expected
# to expand this path itself — verify it does before relying on it.
log_file_path = "~/.local/state/ollamaterm/app.log"

[persistence]
enabled = true
directory = "~/.local/state/ollamaterm/conversations"
metadata_path = "~/.local/state/ollamaterm/conversations/index.json"

[tools]
# Enable schema-first custom coding tools (read/write/search/edit/bash/plan/todo/etc.)
enabled = true
# Base root for relative paths in file/search/edit tools.
workspace_root = "."
# Allow adding temporary external roots via external-directory tool.
allow_external_directories = false
# Safety/runtime limits.
command_timeout_seconds = 30
max_output_lines = 200
max_output_bytes = 50000
max_read_bytes = 200000
max_search_results = 200
# Optional always-allowed external roots.
default_external_directories = []

[capabilities]
# Whether to render the model's reasoning trace in the assistant bubble.
# The trace is shown only when the active model supports thinking (auto-detected).
show_thinking = true
# Enable Ollama's built-in web_search and web_fetch tools.
# Requires OLLAMA_API_KEY to be set (or web_search_api_key below).
# Only active when the model also supports tool calling (auto-detected).
# See: https://ollama.com/settings/keys
web_search_enabled = false
# API key for Ollama web search. If empty, falls back to OLLAMA_API_KEY env var.
web_search_api_key = ""
# Maximum number of tool-call iterations per message before stopping the loop.
max_tool_iterations = 10
# NOTE: thinking support, tool calling, and vision are now detected automatically
# from Ollama's /api/show response and no longer require manual configuration.