@@ -117,17 +117,13 @@ pip install synaptic-memory[all] # 전부
117117
118118## Quick Start
119119
120- ### 기본 — zero-dep
120+ ### 1. In-memory — zero-dep, 바로 시작
121121
122122``` python
123- from synaptic.backends.memory import MemoryBackend
124- from synaptic import SynapticGraph, ActivityTracker, build_agent_ontology
123+ from synaptic import SynapticGraph, ActivityTracker
125124
126125async def main():
127- backend = MemoryBackend()
128- await backend.connect()
129-
130- graph = SynapticGraph(backend, ontology = build_agent_ontology())
126+ graph = SynapticGraph.memory()
131127 tracker = ActivityTracker(graph)
132128
133129 # 과거 경험 검색 (intent 자동 추론)
@@ -149,68 +145,66 @@ async def main():
149145     content="Zero downtime 달성",
150146     success=True,
151147 )
152- await backend.close()
153148```
154149
155- ### 자동 온톨로지 — 규칙 기반 (무료)
150+ ### 2. SQLite — 경량 프로덕션
156151
157152``` python
158- from synaptic import SynapticGraph, RuleBasedClassifier, RuleBasedRelationDetector
153+ from synaptic import SynapticGraph
159154
160- graph = SynapticGraph(
161-     backend,
162-     classifier=RuleBasedClassifier(),
163-     relation_detector=RuleBasedRelationDetector(),
164- )
155+ graph = SynapticGraph.sqlite("knowledge.db")
156+ await graph.backend.connect()
165157
158+ # RuleBasedClassifier + RelationDetector + Ontology 자동 포함
166159# kind, tags 지정 없이 넣기만 하면 자동 분류 + 자동 관계
167160await graph.add("환불 정책", "7일 이내 환불 가능...")  # → kind=RULE 자동
168161```
169162
170- ### 자동 온톨로지 — LLM 기반 (최고 품질)
163+ ### 3. Full — LLM 분류 + 임베딩 + 관계 탐지
171164
172165``` python
173- from synaptic import (
174- SynapticGraph, OllamaLLMProvider, OllamaEmbeddingProvider,
175- LLMClassifier, LLMRelationDetector,
176- RuleBasedClassifier, RuleBasedRelationDetector,
177- )
178-
179- llm = OllamaLLMProvider(model="qwen3:0.6b")
180-
181- graph = SynapticGraph(
182-     backend,
183-     classifier=LLMClassifier(llm, fallback=RuleBasedClassifier()),
184-     relation_detector=LLMRelationDetector(llm, fallback=RuleBasedRelationDetector()),
185-     embedder=OllamaEmbeddingProvider(model="qwen3-embedding:0.6b"),
166+ from synaptic import SynapticGraph
167+ from synaptic.backends.sqlite import SQLiteBackend
168+ from synaptic.extensions.llm_provider import OllamaLLMProvider
169+
170+ graph = SynapticGraph.full(
171+     SQLiteBackend("knowledge.db"),
172+     llm=OllamaLLMProvider(model="qwen3:0.6b"),
173+     embed_api_base="http://localhost:8080/v1",
174+     embed_model="BAAI/bge-m3",
186175)
176+ await graph.backend.connect()
187177
188178# LLM이 kind 분류 + tags + 검색 키워드 + 검색 시나리오 자동 생성
189179# embedding에 search_keywords 포함 → 벡터 검색 정확도 향상
190180# 기존 노드와 의미적 관계 자동 탐지 (DEPENDS_ON, LEARNED_FROM 등)
191181node = await graph.add("결제 장애 사후 분석", "PG사 API 타임아웃...")
192182```
193183
194- ### Auto-Embedding (vLLM / llama.cpp / Ollama)
184+ ### 4. Custom — 직접 조합
185+
186+ 팩토리 함수 대신 각 컴포넌트를 직접 선택할 수도 있다:
195187
196188``` python
197189from synaptic import SynapticGraph, OpenAIEmbeddingProvider
190+ from synaptic.backends.sqlite import SQLiteBackend
198191
199- embedder = OpenAIEmbeddingProvider(
200-     "http://gpu-server:8080/v1",
201-     model="BAAI/bge-m3",
192+ graph = SynapticGraph(
193+     SQLiteBackend("knowledge.db"),
194+     embedder=OpenAIEmbeddingProvider("http://gpu-server:8080/v1", model="BAAI/bge-m3"),
202195)
203- graph = SynapticGraph(backend, embedder=embedder)
196+ await graph.backend.connect()
204197
205198# 자동: title+content → 벡터 생성 → 저장
206199await graph.add("배포 전략", "Blue-green 배포로 zero downtime 달성")
207200# 자동: 쿼리 → 벡터 생성 → FTS + vector 동시 검색
208201result = await graph.search("배포 방식")
209202```
210203
211- ### Scale: CompositeBackend
204+ ### 5. Scale — CompositeBackend
212205
213206``` python
207+ from synaptic import SynapticGraph
214208from synaptic.backends.composite import CompositeBackend
215209from synaptic.backends.neo4j import Neo4jBackend
216210from synaptic.backends.qdrant import QdrantBackend
@@ -222,7 +216,8 @@ composite = CompositeBackend(
222216     blob=MinIOBackend("localhost:9000", access_key="minio", secret_key="secret"),
223217)
224218await composite.connect()
225- graph = SynapticGraph(composite, embedder = embedder)
219+
220+ graph = SynapticGraph.full(composite, embed_api_base="http://gpu-server:8080/v1")
226221
227222# 내부 라우팅:
228223# - embedding → Qdrant, content > 100KB → MinIO, 나머지 → Neo4j
0 commit comments