Skip to content

Commit a6efe55

Browse files
committed
Added siray as a provider
1 parent 30daf78 commit a6efe55

9 files changed

Lines changed: 92 additions & 1 deletion

File tree

.github/siray.png

196 KB
Loading

README.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
[![Siray](/.github/siray.png)](https://www.siray.ai?ytag=affiliate_website_caviraoss_0212)
2+
13
# OpenMemory
24

35
> **Real long-term memory for AI agents. Not RAG. Not a vector DB. Self-hosted, Python + Node.**

packages/openmemory-js/src/core/cfg.ts

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -57,6 +57,11 @@ export const env = {
5757
AWS_REGION: process.env.AWS_REGION || "",
5858
AWS_ACCESS_KEY_ID: process.env.AWS_ACCESS_KEY_ID || "",
5959
AWS_SECRET_ACCESS_KEY: process.env.AWS_SECRET_ACCESS_KEY || "",
60+
siray_key: process.env.SIRAY_API_TOKEN || process.env.OM_SIRAY_API_TOKEN || "",
61+
siray_base_url: str(
62+
process.env.OM_SIRAY_BASE_URL,
63+
"https://api.siray.ai/v1",
64+
),
6065
ollama_url: str(
6166
process.env.OLLAMA_URL || process.env.OM_OLLAMA_URL,
6267
"http://localhost:11434",

packages/openmemory-js/src/core/models.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -53,13 +53,15 @@ const get_defaults = (): model_cfg => ({
5353
openai: "text-embedding-3-small",
5454
gemini: "models/embedding-001",
5555
aws: "amazon.titan-embed-text-v2:0",
56+
siray: "text-embedding-3-small",
5657
local: "all-MiniLM-L6-v2",
5758
},
5859
semantic: {
5960
ollama: "nomic-embed-text",
6061
openai: "text-embedding-3-small",
6162
gemini: "models/embedding-001",
6263
aws: "amazon.titan-embed-text-v2:0",
64+
siray: "text-embedding-3-small",
6365
local: "all-MiniLM-L6-v2",
6466
},
6567
procedural: {

packages/openmemory-js/src/memory/embed.ts

Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -142,6 +142,8 @@ async function embed_with_provider(
142142
return await emb_local(t, s);
143143
case "synthetic":
144144
return gen_syn_emb(t, s);
145+
case "siray":
146+
return await emb_siray(t, s);
145147
default:
146148
throw new Error(`Unknown embedding provider: ${provider}`);
147149
}
@@ -400,6 +402,31 @@ async function emb_aws(t: string, s: string): Promise<number[]> {
400402
}
401403
}
402404

405+
async function emb_siray(t: string, s: string): Promise<number[]> {
406+
if (!env.siray_key) throw new Error("Siray key missing");
407+
const m = get_model(s, "siray");
408+
409+
// Use direct fetch since we might need custom handling or just to be safe
410+
// adapting from emb_openai but with siray vars
411+
const r = await fetchWithTimeout(
412+
`${env.siray_base_url.replace(/\/$/, "")}/embeddings`,
413+
{
414+
method: "POST",
415+
headers: {
416+
"content-type": "application/json",
417+
authorization: `Bearer ${env.siray_key}`,
418+
},
419+
body: JSON.stringify({
420+
input: t,
421+
model: m,
422+
// Siray docs didn't specify dimensions support for all models, assume standard if compatible
423+
}),
424+
},
425+
);
426+
if (!r.ok) throw new Error(`Siray: ${r.status}`);
427+
return ((await r.json()) as any).data[0].embedding;
428+
}
429+
403430
async function emb_local(t: string, s: string): Promise<number[]> {
404431
if (!env.local_model_path) {
405432
console.error("[EMBED] Local model missing, using synthetic");
@@ -686,6 +713,16 @@ export const getEmbeddingInfo = () => {
686713
!!env.AWS_SECRET_ACCESS_KEY;
687714
i.batch_api = env.embed_mode === "simple";
688715
i.model = "amazon.titan-embed-text-v2:0";
716+
} else if (env.emb_kind === "siray") {
717+
i.configured = !!env.siray_key;
718+
i.base_url = env.siray_base_url;
719+
i.models = {
720+
episodic: get_model("episodic", "siray"),
721+
semantic: get_model("semantic", "siray"),
722+
procedural: get_model("procedural", "siray"),
723+
emotional: get_model("emotional", "siray"),
724+
reflective: get_model("reflective", "siray"),
725+
};
689726
} else if (env.emb_kind === "ollama") {
690727
i.configured = true;
691728
i.url = env.ollama_url;

packages/openmemory-py/src/openmemory/ai/__init__.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,5 +4,6 @@
44
from .gemini import GeminiAdapter
55
from .aws import AwsAdapter
66
from .synthetic import SyntheticAdapter
7+
from .siray import SirayAdapter
78

8-
__all__ = ["AIAdapter", "OpenAIAdapter", "OllamaAdapter", "GeminiAdapter", "AwsAdapter", "SyntheticAdapter"]
9+
__all__ = ["AIAdapter", "OpenAIAdapter", "OllamaAdapter", "GeminiAdapter", "AwsAdapter", "SyntheticAdapter", "SirayAdapter"]
Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,33 @@
1+
import os
2+
from typing import List, Dict, Any, Optional
3+
from openai import AsyncOpenAI
4+
from ..core.config import env
5+
from .adapter import AIAdapter
6+
7+
class SirayAdapter(AIAdapter):
    """AI adapter for the Siray API, which exposes an OpenAI-compatible
    surface; chat and embeddings are delegated to an ``AsyncOpenAI`` client
    pointed at the Siray base URL.
    """

    def __init__(self, api_key: Optional[str] = None, base_url: Optional[str] = None):
        # Fall back to the configured credentials when not passed explicitly.
        self.api_key = api_key or env.siray_key
        self.base_url = base_url or env.siray_base_url
        self.client = AsyncOpenAI(api_key=self.api_key, base_url=self.base_url)

    async def chat(self, messages: List[Dict[str, str]], model: Optional[str] = None, **kwargs) -> str:
        """Run a chat completion and return the assistant message text
        (empty string when the API returns no content).

        Model resolution: explicit ``model`` arg, then ``env.siray_model``,
        then the hard-coded fallback below.
        """
        # NOTE(review): the fallback is an image-to-image model name taken
        # from the Siray docs example — almost certainly wrong for chat.
        # Kept for behavior compatibility; confirm the correct chat default.
        m = model or env.siray_model or "black-forest-labs/flux-kontext-i2i-pro"

        res = await self.client.chat.completions.create(
            model=m,
            messages=messages,
            **kwargs,
        )
        return res.choices[0].message.content or ""

    async def embed(self, text: str, model: Optional[str] = None) -> List[float]:
        """Embed a single text; defaults to the OpenAI-compatible
        ``text-embedding-3-small`` model (Siray-specific default unknown —
        TODO confirm against Siray docs)."""
        m = model or "text-embedding-3-small"
        res = await self.client.embeddings.create(input=text, model=m)
        return res.data[0].embedding

    async def embed_batch(self, texts: List[str], model: Optional[str] = None) -> List[List[float]]:
        """Embed a batch of texts in one request; result order matches input order."""
        m = model or "text-embedding-3-small"
        res = await self.client.embeddings.create(input=texts, model=m)
        return [d.embedding for d in res.data]

packages/openmemory-py/src/openmemory/connectors/langchain.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,9 +5,15 @@
55
from langchain_core.retrievers import BaseRetriever
66
from langchain_core.documents import Document
77
from langchain_core.callbacks import CallbackManagerForRetrieverRun
8+
from langchain_core.callbacks import CallbackManagerForRetrieverRun
89
except ImportError:
910
BaseChatMessageHistory = object
1011
BaseRetriever = object
12+
BaseMessage = object
13+
HumanMessage = object
14+
AIMessage = object
15+
Document = object
16+
CallbackManagerForRetrieverRun = object
1117

1218
from ..main import Memory
1319

packages/openmemory-py/src/openmemory/core/config.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -58,6 +58,11 @@ def get(sec: str, key: str, env_var: str, default: Any) -> Any:
5858
self.aws_region = get("ai", "aws_region", "AWS_REGION", None)
5959
self.aws_access_key_id = get("ai", "aws_access_key_id", "AWS_ACCESS_KEY_ID", None)
6060
self.aws_secret_access_key = get("ai", "aws_secret_access_key", "AWS_SECRET_ACCESS_KEY", None)
61+
62+
self.siray_key = get("ai", "siray_key", "SIRAY_API_TOKEN", "") or os.getenv("OM_SIRAY_API_TOKEN")
63+
self.siray_base_url = get("ai", "siray_base", "OM_SIRAY_BASE_URL", "https://api.siray.ai/v1")
64+
self.siray_model = get("ai", "siray_model", "OM_SIRAY_MODEL", None)
65+
6166
self.vec_dim = int(num(os.getenv("OM_VEC_DIM"), 1536))
6267
self.min_score = num(os.getenv("OM_MIN_SCORE"), 0.3)
6368
self.keyword_boost = num(os.getenv("OM_KEYWORD_BOOST"), 2.5)

0 commit comments

Comments
 (0)