Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
69 changes: 53 additions & 16 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -127,15 +127,23 @@ export const SupermemoryPlugin: Plugin = async (ctx: PluginInput) => {
if (isFirstMessage) {
injectedSessions.add(input.sessionID);

const [profileResult, userMemoriesResult, projectMemoriesListResult] = await Promise.all([
const [profileResult, userMemoriesResult, projectMemoriesListResult, legacyProjectResult] = await Promise.all([
supermemoryClient.getProfile(tags.user, userMessage),
supermemoryClient.searchMemories(userMessage, tags.user),
supermemoryClient.listMemories(tags.project, CONFIG.maxProjectMemories),
tags.legacyProject
? supermemoryClient.listMemories(tags.legacyProject, CONFIG.maxProjectMemories)
: Promise.resolve({ success: true, memories: [] } as const),
]);

const profile = profileResult.success ? profileResult : null;
const userMemories = userMemoriesResult.success ? userMemoriesResult : { results: [] };
const projectMemoriesList = projectMemoriesListResult.success ? projectMemoriesListResult : { memories: [] };
const currentMemories = projectMemoriesListResult.success ? (projectMemoriesListResult.memories || []) : [];
const legacyMemories = legacyProjectResult.success ? (legacyProjectResult.memories || []) : [];
const seenIds = new Set(currentMemories.map((m: any) => m.id));
const mergedMemories = [...currentMemories, ...legacyMemories.filter((m: any) => !seenIds.has(m.id))]
.slice(0, CONFIG.maxProjectMemories);
const projectMemoriesList = { success: true, memories: mergedMemories };

const projectMemories = {
results: (projectMemoriesList.memories || []).map((m: any) => ({
Expand Down Expand Up @@ -338,22 +346,28 @@ export const SupermemoryPlugin: Plugin = async (ctx: PluginInput) => {
}

if (scope === "project") {
const result = await supermemoryClient.searchMemories(
args.query,
tags.project
);
const [result, legacyResult] = await Promise.all([
supermemoryClient.searchMemories(args.query, tags.project),
tags.legacyProject
? supermemoryClient.searchMemories(args.query, tags.legacyProject)
: Promise.resolve({ success: true, results: [] } as const),
]);
if (!result.success) {
return JSON.stringify({
success: false,
error: result.error || "Failed to search memories",
});
}
return formatSearchResults(args.query, scope, result, args.limit);
const merged = mergeSearchResults(result, legacyResult);
return formatSearchResults(args.query, scope, merged, args.limit);
}

const [userResult, projectResult] = await Promise.all([
const [userResult, projectResult, legacyProjectResult] = await Promise.all([
supermemoryClient.searchMemories(args.query, tags.user),
supermemoryClient.searchMemories(args.query, tags.project),
tags.legacyProject
? supermemoryClient.searchMemories(args.query, tags.legacyProject)
: Promise.resolve({ success: true, results: [] } as const),
]);

if (!userResult.success || !projectResult.success) {
Expand All @@ -363,12 +377,14 @@ export const SupermemoryPlugin: Plugin = async (ctx: PluginInput) => {
});
}

const mergedProject = mergeSearchResults(projectResult, legacyProjectResult);

const combined = [
...(userResult.results || []).map((r) => ({
...r,
scope: "user" as const,
})),
...(projectResult.results || []).map((r) => ({
...(mergedProject.results || []).map((r) => ({
...r,
scope: "project" as const,
})),
Expand Down Expand Up @@ -414,11 +430,15 @@ export const SupermemoryPlugin: Plugin = async (ctx: PluginInput) => {
const limit = args.limit || 20;
const containerTag =
scope === "user" ? tags.user : tags.project;

const result = await supermemoryClient.listMemories(
containerTag,
limit
);
const legacyTag =
scope === "project" ? tags.legacyProject : undefined;

const [result, legacyListResult] = await Promise.all([
supermemoryClient.listMemories(containerTag, limit),
legacyTag
? supermemoryClient.listMemories(legacyTag, limit)
: Promise.resolve({ success: true, memories: [] } as const),
]);

if (!result.success) {
return JSON.stringify({
Expand All @@ -427,7 +447,11 @@ export const SupermemoryPlugin: Plugin = async (ctx: PluginInput) => {
});
}

const memories = result.memories || [];
const currentMems = result.memories || [];
const legacyMems = legacyListResult.success ? (legacyListResult.memories || []) : [];
const listSeenIds = new Set(currentMems.map((m: any) => m.id));
const memories = [...currentMems, ...legacyMems.filter((m: any) => !listSeenIds.has(m.id))]
.slice(0, limit);
return JSON.stringify({
success: true,
scope,
Expand Down Expand Up @@ -492,10 +516,23 @@ export const SupermemoryPlugin: Plugin = async (ctx: PluginInput) => {
};
};

type SearchResult = { id: string; memory?: string; chunk?: string; similarity?: number };
type SearchResponse = { success?: boolean; results?: SearchResult[] };

/**
 * Combine primary (current-tag) search results with legacy-tag results.
 * Primary results always come first; legacy results are appended only when
 * the legacy call succeeded and only for ids not already present, so the
 * merged list is deduplicated by memory id.
 */
function mergeSearchResults(primary: SearchResponse, legacy: SearchResponse): SearchResponse {
  const base = primary.results ?? [];
  const known = new Set<string>();
  for (const item of base) known.add(item.id);

  const extras: SearchResult[] = [];
  if (legacy.success) {
    for (const item of legacy.results ?? []) {
      if (!known.has(item.id)) extras.push(item);
    }
  }

  return { ...primary, results: base.concat(extras) };
}

function formatSearchResults(
query: string,
scope: string | undefined,
results: { results?: Array<{ id: string; memory?: string; chunk?: string; similarity?: number }> },
results: SearchResponse,
limit?: number
): string {
const memoryResults = results.results || [];
Expand Down
17 changes: 13 additions & 4 deletions src/services/compaction.ts
Original file line number Diff line number Diff line change
Expand Up @@ -248,7 +248,7 @@ export interface CompactionContext {

export function createCompactionHook(
ctx: CompactionContext,
tags: { user: string; project: string },
tags: { user: string; project: string; legacyProject?: string },
options?: CompactionOptions
) {
const state: CompactionState = {
Expand All @@ -262,9 +262,18 @@ export function createCompactionHook(

async function fetchProjectMemoriesForCompaction(): Promise<string[]> {
try {
const result = await supermemoryClient.listMemories(tags.project, CONFIG.maxProjectMemories);
const memories = result.memories || [];
return memories.map((m: any) => m.summary || m.content || "").filter(Boolean);
const [result, legacyResult] = await Promise.all([
supermemoryClient.listMemories(tags.project, CONFIG.maxProjectMemories),
tags.legacyProject
? supermemoryClient.listMemories(tags.legacyProject, CONFIG.maxProjectMemories)
: Promise.resolve({ success: true, memories: [] } as const),
]);
const currentMems = result.memories || [];
const legacyMems = legacyResult.success ? (legacyResult.memories || []) : [];
const seenIds = new Set(currentMems.map((m: any) => m.id));
const allMemories = [...currentMems, ...legacyMems.filter((m: any) => !seenIds.has(m.id))]
.slice(0, CONFIG.maxProjectMemories);
return allMemories.map((m: any) => m.summary || m.content || "").filter(Boolean);
} catch (err) {
log("[compaction] failed to fetch project memories", { error: String(err) });
return [];
Expand Down
65 changes: 63 additions & 2 deletions src/services/tags.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,43 @@ export function getGitEmail(): string | null {
}
}

/**
* Normalize a git remote URL to a canonical form so that SSH, HTTPS,
* and with/without `.git` suffix all produce the same identifier.
*
* Examples:
* git@github.com:user/repo.git → github.com/user/repo
* https://github.com/user/repo → github.com/user/repo
* git@gitlab.com:org/sub/repo.git → gitlab.com/org/sub/repo
*/
export function normalizeGitUrl(url: string): string {
return url
.replace(/^[a-z+]+:\/\//, "") // strip protocol (https://, git://, ssh://)
.replace(/^[^@]+@/, "") // strip user@ prefix (git@, user@)
.replace(/:(\d+)\//, "/$1/") // preserve port numbers (e.g. :8080/)

This comment was marked as outdated.

Copy link
Copy Markdown
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Evaluated and this is not a real issue. The port number is part of the server identity — if a server runs on port 2222, all clones will include :2222 and they normalize consistently to gitlab.local/2222/user/repo.

The only failure case would be one machine using ssh://git@gitlab.local:22/user/repo.git (explicitly specifying the default port) while another uses ssh://git@gitlab.local/user/repo.git. This is vanishingly rare — no standard git tooling generates URLs with the default port.

Stripping port numbers would actually be worse: it would incorrectly unify repos from different servers on the same host but different ports (e.g., a staging and production GitLab on the same machine).

.replace(":", "/") // SSH colon to slash (github.com:user → github.com/user)
.replace(/\.git$/, "") // strip trailing .git
.replace(/\/+$/, ""); // strip trailing slashes
}

/**
 * Read the `origin` remote URL of the git repository containing `directory`.
 * The remote URL is a stable, cross-machine project identifier.
 *
 * @param directory - working directory to run `git config` in
 * @returns the trimmed remote URL, or null when git is unavailable, the
 *   directory is not inside a repo, or no origin remote is configured
 */
export function getGitRemoteUrl(directory: string): string | null {
  try {
    const output = execSync("git config --get remote.origin.url", {
      cwd: directory,
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"], // keep git quiet on stderr
    });
    const trimmed = output.trim();
    return trimmed.length > 0 ? trimmed : null;
  } catch {
    // git missing, not a repo, or remote.origin.url unset — all mean "no remote"
    return null;
  }
}

export function getUserTag(): string {
// If userContainerTag is explicitly set, use it
if (CONFIG.userContainerTag) {
Expand All @@ -36,13 +73,37 @@ export function getProjectTag(directory: string): string {
return CONFIG.projectContainerTag;
}

// Otherwise, auto-generate based on containerTagPrefix
// Try to use git remote URL as a stable cross-machine project identifier
// This allows the same project on different machines to share memories
const remoteUrl = getGitRemoteUrl(directory);
if (remoteUrl) {
return `${CONFIG.containerTagPrefix}_project_${sha256(normalizeGitUrl(remoteUrl))}`;
}
Comment on lines +78 to +81

This comment was marked as outdated.

Copy link
Copy Markdown
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Valid finding. Existing users have memories stored under sha256(directory). Switching to sha256(normalizeGitUrl(remoteUrl)) orphans those memories on upgrade — not just when adding a remote after creation.

Fixed in a88b0bd: all read operations (search, list, compaction context fetch) now query both the canonical remote-based tag and the legacy directory-based tag, deduplicating by memory ID. Writes go only to the new tag, so memories gradually migrate forward.


// Fall back to directory path hash (machine-specific)
return `${CONFIG.containerTagPrefix}_project_${sha256(directory)}`;
}

/**
 * Compute the old directory-hash project tag, used only for reading memories
 * created before git-remote-based tagging was introduced.
 *
 * Returns undefined when no legacy tag applies: either the project tag is
 * pinned via config, or there is no git remote (in which case the canonical
 * tag is already the directory hash and no separate legacy tag exists).
 */
export function getLegacyProjectTag(directory: string): string | undefined {
  // An explicit projectContainerTag overrides tag derivation entirely.
  if (CONFIG.projectContainerTag) return undefined;

  // No remote → canonical tag is already directory-based; nothing legacy.
  if (!getGitRemoteUrl(directory)) return undefined;

  // Remote exists, so the canonical tag is remote-based; expose the old
  // directory-based tag for migration reads.
  return `${CONFIG.containerTagPrefix}_project_${sha256(directory)}`;
}

export function getTags(directory: string): { user: string; project: string } {
export function getTags(directory: string): { user: string; project: string; legacyProject?: string } {
return {
user: getUserTag(),
project: getProjectTag(directory),
legacyProject: getLegacyProjectTag(directory),
};
}