Skip to content

Commit baca3da

Browse files
authored
Merge pull request #7 from MinimalFuture/feat-super-agent
feat: super single agent framework
2 parents 5b00812 + 0747fb1 commit baca3da

43 files changed

Lines changed: 4903 additions & 317 deletions

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

agentmesh/__init__.py

Lines changed: 41 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,4 +19,44 @@
1919
# Setup logging when the package is imported
2020
setup_logging()
2121

22-
__all__ = ['AgentTeam', 'Agent', 'LLMModel', 'Task', 'TeamResult']
22+
__all__ = ['AgentTeam', 'Agent', 'LLMModel', 'Task', 'TeamResult', 'set_workspace', 'get_workspace']
23+
24+
# Global workspace configuration
_global_workspace_root = None


def set_workspace(workspace_root: str):
    """
    Set the global workspace root used by all agents.

    Call this once at the start of your application; every agent will use
    this workspace unless it is explicitly overridden.

    Args:
        workspace_root: Path to workspace root (e.g., "~/my_agents")

    Example:
        >>> import agentmesh
        >>> agentmesh.set_workspace("~/my_agents")
        >>> # Now all agents will use ~/my_agents as workspace root
    """
    global _global_workspace_root
    _global_workspace_root = os.path.expanduser(workspace_root)

    # Keep the memory subsystem in sync with the new workspace root.
    from agentmesh.memory import MemoryConfig, set_global_memory_config
    set_global_memory_config(MemoryConfig(workspace_root=_global_workspace_root))


def get_workspace() -> str:
    """
    Get the current global workspace root.

    Returns:
        The configured workspace root path, or the default "~/agentmesh"
        when no workspace has been set.
    """
    global _global_workspace_root
    if _global_workspace_root is not None:
        return _global_workspace_root
    return os.path.expanduser("~/agentmesh")

agentmesh/memory/__init__.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
"""
2+
Memory module for AgentMesh
3+
4+
Provides long-term memory capabilities with hybrid search (vector + keyword)
5+
"""
6+
7+
from agentmesh.memory.manager import MemoryManager
8+
from agentmesh.memory.config import MemoryConfig, get_default_memory_config, set_global_memory_config
9+
10+
__all__ = ['MemoryManager', 'MemoryConfig', 'get_default_memory_config', 'set_global_memory_config']

agentmesh/memory/chunker.py

Lines changed: 139 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,139 @@
1+
"""
2+
Text chunking utilities for memory
3+
4+
Splits text into chunks with token limits and overlap
5+
"""
6+
7+
from typing import List, Tuple
8+
from dataclasses import dataclass
9+
10+
11+
@dataclass
class TextChunk:
    """A contiguous piece of text plus the 1-based line span it covers."""
    text: str        # chunk content, newline-joined
    start_line: int  # first source line (1-based, inclusive)
    end_line: int    # last source line (1-based, inclusive)


class TextChunker:
    """Splits text into line-based chunks under a rough token budget, with overlap."""

    def __init__(self, max_tokens: int = 500, overlap_tokens: int = 50):
        """
        Initialize the chunker.

        Args:
            max_tokens: Maximum tokens allowed per chunk
            overlap_tokens: Tokens of overlap carried between adjacent chunks
        """
        self.max_tokens = max_tokens
        self.overlap_tokens = overlap_tokens
        # Rough estimate for mixed English/Chinese text: ~4 chars per token.
        self.chars_per_token = 4

    def chunk_text(self, text: str) -> List[TextChunk]:
        """
        Split text into overlapping segments.

        Args:
            text: Input text to chunk

        Returns:
            List of TextChunk objects (empty for blank input)
        """
        if not text.strip():
            return []

        src_lines = text.split('\n')
        result: List[TextChunk] = []

        char_budget = self.max_tokens * self.chars_per_token
        overlap_budget = self.overlap_tokens * self.chars_per_token

        buffer: List[str] = []   # lines accumulated for the chunk in progress
        buffer_len = 0           # total characters currently in buffer
        first_line = 1           # source line number where buffer starts

        for lineno, row in enumerate(src_lines, start=1):
            row_len = len(row)

            # A single line over budget is flushed and split on its own.
            if row_len > char_budget:
                if buffer:
                    result.append(TextChunk('\n'.join(buffer), first_line, lineno - 1))
                    buffer = []
                    buffer_len = 0
                for piece in self._split_long_line(row, char_budget):
                    result.append(TextChunk(piece, lineno, lineno))
                first_line = lineno + 1
                continue

            if buffer and buffer_len + row_len > char_budget:
                # Budget exceeded: emit the buffer, then seed the next chunk
                # with trailing overlap lines plus the current line.
                result.append(TextChunk('\n'.join(buffer), first_line, lineno - 1))
                carried = self._get_overlap_lines(buffer, overlap_budget)
                buffer = carried + [row]
                buffer_len = sum(len(s) for s in buffer)
                first_line = lineno - len(carried)
            else:
                buffer.append(row)
                buffer_len += row_len

        # Emit whatever is left at end of input.
        if buffer:
            result.append(TextChunk('\n'.join(buffer), first_line, len(src_lines)))

        return result

    def _split_long_line(self, line: str, max_chars: int) -> List[str]:
        """Break one oversized line into pieces of at most max_chars characters."""
        return [line[pos:pos + max_chars] for pos in range(0, len(line), max_chars)]

    def _get_overlap_lines(self, lines: List[str], target_chars: int) -> List[str]:
        """Return the trailing lines whose combined length fits within target_chars."""
        kept: List[str] = []
        used = 0
        for candidate in reversed(lines):
            if used + len(candidate) > target_chars:
                break
            kept.insert(0, candidate)
            used += len(candidate)
        return kept

    def chunk_markdown(self, text: str) -> List[TextChunk]:
        """
        Chunk markdown text while respecting structure
        (for future enhancement: respect markdown sections).
        """
        return self.chunk_text(text)

agentmesh/memory/config.py

Lines changed: 111 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,111 @@
1+
"""
2+
Memory configuration module
3+
4+
Provides global memory configuration with simplified workspace structure
5+
"""
6+
7+
import os
8+
from dataclasses import dataclass, field
9+
from typing import Optional, List
10+
from pathlib import Path
11+
12+
13+
@dataclass
class MemoryConfig:
    """Configuration for memory storage and hybrid search."""

    # Storage paths (default: ~/agentmesh)
    workspace_root: str = field(default_factory=lambda: os.path.expanduser("~/agentmesh"))

    # Embedding settings
    embedding_provider: str = "openai"  # "openai" | "local"
    embedding_model: str = "text-embedding-3-small"
    embedding_dim: int = 1536

    # Chunking settings
    chunk_max_tokens: int = 500
    chunk_overlap_tokens: int = 50

    # Search settings
    max_results: int = 10
    min_score: float = 0.3

    # Hybrid search weights (vector + keyword scores)
    vector_weight: float = 0.7
    keyword_weight: float = 0.3

    # Memory sources to index
    sources: List[str] = field(default_factory=lambda: ["memory", "session"])

    # Sync settings
    enable_auto_sync: bool = True
    sync_on_search: bool = True

    def get_workspace(self) -> Path:
        """Return the workspace root directory as a Path."""
        return Path(self.workspace_root)

    def get_memory_dir(self) -> Path:
        """Return the directory that holds memory files."""
        return self.get_workspace() / "memory"

    def get_db_path(self) -> Path:
        """
        Return the SQLite database path for the long-term memory index.

        Creates the containing "long-term" directory on first use.
        """
        long_term_dir = self.get_memory_dir() / "long-term"
        long_term_dir.mkdir(parents=True, exist_ok=True)
        return long_term_dir / "index.db"

    def get_skills_dir(self) -> Path:
        """Return the skills directory."""
        return self.get_workspace() / "skills"

    def get_agent_workspace(self, agent_name: Optional[str] = None) -> Path:
        """
        Return the workspace directory for an agent.

        Args:
            agent_name: Optional agent name (unused in the current layout;
                all agents share the workspace root)

        Returns:
            Path to the shared workspace directory
        """
        return self.get_workspace()
73+
74+
75+
# Global memory configuration
76+
_global_memory_config: Optional[MemoryConfig] = None
77+
78+
79+
def get_default_memory_config() -> MemoryConfig:
    """
    Return the global memory configuration.

    A default ``MemoryConfig`` is lazily created and cached on first
    access when no configuration has been set.

    Returns:
        MemoryConfig instance
    """
    global _global_memory_config
    config = _global_memory_config
    if config is None:
        config = MemoryConfig()
        _global_memory_config = config
    return config
91+
92+
93+
def set_global_memory_config(config: MemoryConfig):
    """
    Install *config* as the global memory configuration.

    Call this before creating any MemoryManager instances so they all
    pick up the same settings.

    Args:
        config: MemoryConfig instance to use globally

    Example:
        >>> from agentmesh.memory import MemoryConfig, set_global_memory_config
        >>> config = MemoryConfig(
        ...     workspace_root="~/my_agents",
        ...     embedding_provider="openai",
        ...     vector_weight=0.8
        ... )
        >>> set_global_memory_config(config)
    """
    global _global_memory_config
    _global_memory_config = config

0 commit comments

Comments
 (0)