Fix: UI fixes for space facts page, changed extension-search api

This commit is contained in:
Harshith Mullapudi 2025-08-22 21:08:38 +05:30
parent 8408964f73
commit b5fe67fad3
38 changed files with 1538 additions and 68405 deletions

View File

@@ -1,4 +1,4 @@
VERSION=0.1.16
VERSION=0.1.17
# Nest run in docker, change host to database container name
DB_HOST=localhost

1
.gitignore vendored
View File

@ -32,6 +32,7 @@ dist
npm-debug.log*
yarn-debug.log*
yarn-error.log*
benchmarks/
# Misc
.DS_Store

View File

@ -0,0 +1,103 @@
import { z } from "zod";
import { openai } from "@ai-sdk/openai";
import { type CoreMessage, generateText, tool } from "ai";
import { logger } from "~/services/logger.service";
import { SearchService } from "~/services/search.server";
// Input schema for the agent
export const SearchMemoryAgentInput = z.object({
  // The user's question/task; rejected with "User input is required" when empty.
  userInput: z.string().min(1, "User input is required"),
  // Identifies whose memory is searched; rejected when empty.
  userId: z.string().min(1, "User ID is required"),
  // Optional extra context passed through to the model's user message.
  context: z
    .string()
    .optional()
    .describe("Additional context about the user's current work"),
});
/**
 * Search Memory Agent - Designed to find relevant context from user's memory
 *
 * This agent searches the user's memory using a searchMemory tool, retrieves relevant
 * facts and episodes, then summarizes them into a concise, relevant context summary.
 */
export class SearchMemoryAgent {
  // Model used for both tool-calling and the final summary.
  private model = openai("gpt-4o");
  // Backing search service invoked by the searchMemory tool.
  private searchService = new SearchService();

  /**
   * Runs a short agent loop: the model may invoke the searchMemory tool
   * (bounded by maxSteps) and then produces a concise context summary.
   *
   * @param input - validated against SearchMemoryAgentInput; invalid input
   *                throws a ZodError from `parse` before any model call
   * @returns trimmed summary text; on model failure, a generic fallback
   *          string that embeds the original user input (never throws)
   */
  async generateContextSummary(
    input: z.infer<typeof SearchMemoryAgentInput>,
  ): Promise<string> {
    const { userInput, userId, context } = SearchMemoryAgentInput.parse(input);

    // Define the searchMemory tool that actually calls the search service
    const searchMemoryTool = tool({
      description:
        "Search the user's memory for relevant facts and episodes based on a query",
      parameters: z.object({
        query: z.string().describe("Search query to find relevant information"),
      }),
      execute: async ({ query }) => {
        try {
          const searchResult = await this.searchService.search(query, userId);
          // assumes SearchService.search resolves to { facts?, episodes? } — TODO confirm
          return {
            facts: searchResult.facts || [],
            episodes: searchResult.episodes || [],
          };
        } catch (error) {
          // Tool failures are swallowed so the agent can still answer;
          // empty arrays read as "nothing found" to the model.
          logger.error(`SearchMemory tool error: ${error}`);
          return {
            facts: [],
            episodes: [],
          };
        }
      },
    });

    const messages: CoreMessage[] = [
      {
        role: "system",
        content: `You are a specialized memory search and summarization agent. Your job is to:
1. First, use the searchMemory tool to find relevant information from the user's memory based on their input
2. Then, analyze the retrieved facts and episodes to create a concise, relevant summary
You have access to a searchMemory tool that can search the user's knowledge base. Use this tool with relevant search queries to find information that would help answer their question.
After retrieving the information, provide a concise summary (2-4 sentences) that highlights the most relevant context for answering their question. Focus on:
- Key facts that directly relate to their question
- Important background information or decisions
- Relevant examples or past experiences
- Critical context that would help provide a good answer
If no relevant information is found, provide a brief statement indicating that.`,
      },
      {
        role: "user",
        content: `User input: "${userInput}"${context ? `\n\nAdditional context: ${context}` : ""}\n\nPlease search my memory for relevant information and provide a concise summary of the most important context for this question.`,
      },
    ];

    try {
      const result = await generateText({
        model: this.model,
        messages,
        tools: {
          searchMemory: searchMemoryTool,
        },
        maxSteps: 5, // allows tool round-trips plus a final text answer
        temperature: 0.3,
        maxTokens: 600,
      });
      return result.text.trim();
    } catch (error) {
      // Never propagate model errors to callers; return a generic hint instead.
      logger.error(`SearchMemoryAgent error: ${error}`);
      return `Context related to: ${userInput}. Looking for relevant background information, previous discussions, and related concepts that would help provide a comprehensive answer.`;
    }
  }
}

// Export a singleton instance
export const searchMemoryAgent = new SearchMemoryAgent();

View File

@ -37,3 +37,19 @@ export function getIcon(icon: IconType) {
return ICON_MAPPING["integration"];
}
/**
 * Icon for the OAuth authorize screen: a provided logo image wins,
 * then a known mapped icon by (lowercased) name, then a generic grid icon.
 */
export const getIconForAuthorise = (name: string, image?: string) => {
  if (image) {
    return <img src={image} alt={name} className="h-[40px] w-[40px] rounded" />;
  }

  const key = name.toLowerCase();
  if (!(key in ICON_MAPPING)) {
    return <LayoutGrid size={40} />;
  }

  const Icon = ICON_MAPPING[key as IconType];
  return <Icon size={40} />;
};

View File

@ -90,7 +90,7 @@ export function LogDetails({
{error && (
<div className="mb-4">
<h3 className="mb-2 text-sm font-medium">Error Details</h3>
<div className="rounded-md border border-red-200 p-3">
<div className="bg-destructive/10 rounded-md p-3">
<div className="flex items-start gap-2 text-red-600">
<AlertCircle className="mt-0.5 h-4 w-4 flex-shrink-0" />
<p className="text-sm break-words whitespace-pre-wrap">

View File

@ -16,8 +16,8 @@ import {
AlertDialogHeader,
AlertDialogTitle,
} from "../ui/alert-dialog";
import { useState } from "react";
import { useFetcher } from "@remix-run/react";
import { useState, useEffect } from "react";
import { redirect, useFetcher } from "@remix-run/react";
interface LogOptionsProps {
id: string;
@ -25,7 +25,7 @@ interface LogOptionsProps {
export const LogOptions = ({ id }: LogOptionsProps) => {
const [deleteDialogOpen, setDeleteDialogOpen] = useState(false);
const deleteFetcher = useFetcher();
const deleteFetcher = useFetcher<{ success: boolean }>();
const handleDelete = () => {
deleteFetcher.submit(
@ -39,10 +39,21 @@ export const LogOptions = ({ id }: LogOptionsProps) => {
setDeleteDialogOpen(false);
};
useEffect(() => {
if (deleteFetcher.state === "idle" && deleteFetcher.data?.success) {
redirect(`/home/logs`);
}
}, [deleteFetcher.state, deleteFetcher.data]);
return (
<>
<DropdownMenu>
<DropdownMenuTrigger asChild>
<DropdownMenuTrigger
asChild
onClick={(e) => {
e.stopPropagation();
}}
>
<Button
variant="ghost"
className="mr-0.5 h-8 shrink items-center justify-between gap-2 px-1.5"
@ -54,7 +65,11 @@ export const LogOptions = ({ id }: LogOptionsProps) => {
</DropdownMenuTrigger>
<DropdownMenuContent align="end">
<DropdownMenuItem onClick={() => setDeleteDialogOpen(true)}>
<DropdownMenuItem
onClick={(e) => {
setDeleteDialogOpen(true);
}}
>
<Button variant="link" size="sm" className="gap-2 rounded">
<Trash size={15} /> Delete
</Button>

View File

@ -11,6 +11,7 @@ interface LogTextCollapseProps {
logData: any;
log: LogItem;
id: string;
reset?: () => void;
}
const getStatusColor = (status: string) => {
@ -66,12 +67,14 @@ export function LogTextCollapse({
className={cn(
"group-hover:bg-grayAlpha-100 flex min-w-[0px] shrink grow items-start gap-2 rounded-md px-4",
)}
onClick={() => setDialogOpen(true)}
>
<div
className={cn(
"border-border flex w-full min-w-[0px] shrink flex-col border-b py-1",
)}
onClick={() => {
setDialogOpen(true);
}}
>
<div className="flex w-full items-center justify-between gap-4">
<div className="inline-flex min-h-[24px] min-w-[0px] shrink cursor-pointer items-center justify-start">
@ -103,7 +106,9 @@ export function LogTextCollapse({
{new Date(log.time).toLocaleString()}
</div>
<LogOptions id={id} />
<div onClick={(e) => e.stopPropagation()}>
<LogOptions id={id} />
</div>
</div>
</div>
</div>

View File

@ -17,6 +17,7 @@ interface VirtualLogsListProps {
loadMore: () => void;
isLoading: boolean;
height?: number;
reset?: () => void;
}
function LogItemRenderer(

View File

@ -19,6 +19,9 @@ export function SpaceFactCard({ fact }: SpaceFactCardProps) {
const displayText = fact.fact;
const recallCount =
(fact.recallCount?.high ?? 0) + (fact.recallCount?.low ?? 0);
return (
<>
<div className="flex w-full items-center px-5 pr-2">
@ -37,9 +40,7 @@ export function SpaceFactCard({ fact }: SpaceFactCardProps) {
<div className={cn("truncate text-left")}>{displayText}</div>
</div>
<div className="text-muted-foreground flex shrink-0 items-center justify-end text-xs">
{fact.recallCount !== undefined && (
<span>Recalled: {fact.recallCount} times</span>
)}
{!!recallCount && <span>Recalled: {recallCount} times</span>}
<Badge variant="secondary" className="rounded text-xs">
<Calendar className="h-3 w-3" />
{formatDate(fact.validAt)}

View File

@ -1,103 +1,37 @@
import { z } from "zod";
import { createActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";
import { SearchService } from "~/services/search.server";
import { makeModelCall } from "~/lib/model.server";
import { json } from "@remix-run/node";
import type { CoreMessage } from "ai";
import { searchMemoryAgent } from "~/agents/searchMemoryAgent.server";
export const ExtensionSearchBodyRequest = z.object({
input: z.string().min(1, "Input text is required"),
limit: z.number().optional().default(20),
maxBfsDepth: z.number().optional(),
includeInvalidated: z.boolean().optional(),
entityTypes: z.array(z.string()).optional(),
scoreThreshold: z.number().optional(),
minResults: z.number().optional(),
});
const searchService = new SearchService();
/**
* Generate multiple search queries from user input using LLM
* Generate context summary from user input using SearchMemoryAgent
*/
async function generateSearchQueries(userInput: string): Promise<string[]> {
const messages: CoreMessage[] = [
{
role: "system",
content: `You are my personal memory assistant. I'm writing something and need you to help me recall relevant information from my past conversations, notes, and experiences that might be useful for what I'm currently working on.
Based on what I'm typing, think about what information from my memory would be most helpful:
- What have I discussed before that relates to this topic?
- What context, decisions, or insights might I need to remember?
- What related work, people, or concepts should I be aware of?
- What problems or solutions have I encountered that are similar?
- What background information would help me with this task?
Generate 3-5 specific search queries that will help me find the most relevant memories and context for my current work. Think like you're helping me remember things I might have forgotten or overlooked.
Return the JSON array of strings wrapped in <output></output> tags. Each string should be a search query.
Format: <output>["query1", "query2", "query3"]</output>
Example input: "working on the user authentication feature"
Example output: ["user authentication implementation", "login flow discussion", "authentication security concerns", "user session management", "auth token handling"]`,
},
{
role: "user",
content: userInput,
},
];
async function generateContextSummary(
userInput: string,
userId: string,
): Promise<string> {
try {
const response = await makeModelCall(
false,
messages,
() => {}, // onFinish callback
{ temperature: 0.3 }
);
const summary = await searchMemoryAgent.generateContextSummary({
userInput,
userId,
});
// Extract content from <output> tags and parse JSON
const outputMatch = (response as string).match(/<output>(.*?)<\/output>/s);
if (!outputMatch) {
throw new Error("No output tags found in LLM response");
}
const queries = JSON.parse(outputMatch[1].trim());
// Validate that we got an array of strings
if (!Array.isArray(queries) || !queries.every(q => typeof q === 'string')) {
throw new Error("Invalid response format from LLM");
}
return queries.slice(0, 5); // Limit to max 5 queries
return summary;
} catch (error) {
console.error("Error generating search queries:", error);
// Fallback: use the original input as a single query
return [userInput];
console.error("Error generating context with agent:", error);
// Fallback: use simple context description
return `Context related to: ${userInput}. Looking for relevant background information, previous discussions, and related concepts that would help provide a comprehensive answer.`;
}
}
/**
* Deduplicate facts and episodes from multiple search results
*/
function deduplicateResults(results: Array<{ episodes: string[]; facts: string[] }>) {
const uniqueFacts = new Set<string>();
const uniqueEpisodes = new Set<string>();
for (const result of results) {
result.facts.forEach(fact => uniqueFacts.add(fact));
result.episodes.forEach(episode => uniqueEpisodes.add(episode));
}
return {
facts: Array.from(uniqueFacts),
episodes: Array.from(uniqueEpisodes),
};
}
const { action, loader } = createActionApiRoute(
{
body: ExtensionSearchBodyRequest,
method: "POST",
allowJWT: true,
authorization: {
action: "search",
@ -105,31 +39,15 @@ const { action, loader } = createActionApiRoute(
corsStrategy: "all",
},
async ({ body, authentication }) => {
// Generate multiple search queries from user input
const searchQueries = await generateSearchQueries(body.input);
// Execute all search queries in parallel
const searchResults = await Promise.all(
searchQueries.map(query =>
searchService.search(query, authentication.userId, {
limit: Math.ceil(body.limit / searchQueries.length), // Distribute limit across queries
maxBfsDepth: body.maxBfsDepth,
includeInvalidated: body.includeInvalidated,
entityTypes: body.entityTypes,
scoreThreshold: body.scoreThreshold,
minResults: body.minResults,
})
)
// Generate context summary using SearchMemoryAgent
const contextSummary = await generateContextSummary(
body.input,
authentication.userId,
);
// Deduplicate and combine results
const combinedResults = deduplicateResults(searchResults);
// Limit final results if they exceed the requested limit
// Return results with agent-generated context summary
const finalResults = {
facts: combinedResults.facts.slice(0, body.limit),
episodes: combinedResults.episodes.slice(0, body.limit),
queries_used: searchQueries, // Include the generated queries for debugging
context_summary: contextSummary, // Agent's context summary
};
return json(finalResults);

View File

@ -1,142 +0,0 @@
import { createActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";
import { getIntegrationDefinitionWithSlug } from "~/services/integrationDefinition.server";
import { proxyRequest } from "~/utils/proxy.server";
import { z } from "zod";
import { getIntegrationAccount } from "~/services/integrationAccount.server";
import { createMCPStdioProxy } from "@core/mcp-proxy";
import { randomUUID } from "node:crypto";
import { configureStdioMCPEnvironment } from "~/trigger/utils/mcp";
export const integrationSlugSchema = z.object({
slug: z.string(),
});
const { action, loader } = createActionApiRoute(
{
params: integrationSlugSchema,
allowJWT: true,
authorization: {
action: "mcp",
},
corsStrategy: "all",
},
async ({ authentication, request, params }) => {
try {
const slug = params.slug;
if (!slug) {
return new Response(
JSON.stringify({ error: "Integration slug is required" }),
{
status: 400,
headers: { "Content-Type": "application/json" },
},
);
}
// Fetch integration definition by slug
const integrationDefinition =
await getIntegrationDefinitionWithSlug(slug);
if (!integrationDefinition) {
return new Response(
JSON.stringify({ error: "Integration not found" }),
{
status: 404,
headers: { "Content-Type": "application/json" },
},
);
}
const spec = integrationDefinition.spec as any;
if (!spec.mcp) {
return new Response(
JSON.stringify({
error: "MCP auth configuration not found for this integration",
}),
{
status: 400,
headers: { "Content-Type": "application/json" },
},
);
}
const { url, type } = spec.mcp;
// Find the integration account for this user and integration
const integrationAccount = await getIntegrationAccount(
integrationDefinition.id,
authentication.userId,
);
if (type === "http") {
const integrationConfig =
integrationAccount?.integrationConfiguration as any;
if (
!integrationAccount ||
!integrationConfig ||
!integrationConfig.mcp
) {
return new Response(
JSON.stringify({
error: "No integration account with mcp config",
}),
{
status: 400,
headers: { "Content-Type": "application/json" },
},
);
}
// Proxy the request to the serverUrl
return await proxyRequest(
request,
url,
integrationConfig.mcp.tokens.access_token,
);
} else {
if (!integrationAccount) {
return new Response(
JSON.stringify({
error: "No integration account found",
}),
{
status: 400,
headers: { "Content-Type": "application/json" },
},
);
}
// Configure environment variables using the utility function
const { env, args } = configureStdioMCPEnvironment(
spec,
integrationAccount,
);
// Get session_id from headers (case-insensitive), or generate a new uuid if not present
const sessionId =
request.headers.get("mcp-session-id") ||
request.headers.get("Mcp-Session-Id") ||
randomUUID();
// Use the saved local file instead of command
const executablePath = `./integrations/${slug}/main`;
return createMCPStdioProxy(request, executablePath, args, {
env,
sessionId,
});
}
} catch (error: any) {
return new Response(JSON.stringify({ error: error.message }), {
status: 500,
headers: { "Content-Type": "application/json" },
});
}
},
);
export { action, loader };

View File

@ -0,0 +1,373 @@
import { json } from "@remix-run/node";
import { randomUUID } from "node:crypto";
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js";
import {
isInitializeRequest,
ListToolsRequestSchema,
CallToolRequestSchema,
} from "@modelcontextprotocol/sdk/types.js";
import { z } from "zod";
import {
createHybridActionApiRoute,
createLoaderApiRoute,
} from "~/services/routeBuilders/apiBuilder.server";
import { handleTransport } from "~/utils/mcp";
import { MCPSessionManager } from "~/utils/mcp/session-manager";
import { TransportManager } from "~/utils/mcp/transport-manager";
import { IntegrationLoader } from "~/utils/mcp/integration-loader";
import { callMemoryTool, memoryTools } from "~/utils/mcp/memory";
import { logger } from "~/services/logger.service";
// Request schemas
// Accept any JSON-RPC-shaped body; real validation happens inside the MCP SDK.
const MCPRequestSchema = z.object({}).passthrough();
const QueryParams = z.object({
  // Logical client source label; defaults to "api" downstream.
  source: z.string().optional(),
  integrations: z.string().optional(), // comma-separated slugs
});
// Common function to create and setup transport
/**
 * Builds a StreamableHTTPServerTransport bound to `sessionId`, persists the
 * session, loads integration transports, and connects a fresh MCP server.
 *
 * Side effects: DB session upsert, in-memory transport registration, and a
 * best-effort cleanup of stale (24h+) sessions on initialization.
 */
async function createTransport(
  sessionId: string,
  source: string,
  integrations: string[],
  userId: string,
  workspaceId: string,
): Promise<StreamableHTTPServerTransport> {
  const transport = new StreamableHTTPServerTransport({
    sessionIdGenerator: () => sessionId,
    // NOTE: the callback parameter shadows the outer `sessionId`; both carry
    // the same value since sessionIdGenerator always returns the outer one.
    onsessioninitialized: async (sessionId) => {
      // Clean up old sessions (24+ hours) during new session initialization
      try {
        const [dbCleanupCount, memoryCleanupCount] = await Promise.all([
          MCPSessionManager.cleanupOldSessions(),
          TransportManager.cleanupOldSessions(),
        ]);
        if (dbCleanupCount > 0 || memoryCleanupCount > 0) {
          logger.log(`Cleaned up ${dbCleanupCount} DB sessions and ${memoryCleanupCount} memory sessions`);
        }
      } catch (error) {
        // Cleanup is best-effort; never block session initialization on it.
        logger.error(`Error during session cleanup: ${error}`);
      }
      // Store session in database
      await MCPSessionManager.upsertSession(sessionId, source, integrations);
      // Store main transport
      TransportManager.setMainTransport(sessionId, transport);
    },
  });

  // Setup cleanup on close
  transport.onclose = async () => {
    await MCPSessionManager.deleteSession(sessionId);
    await TransportManager.cleanupSession(sessionId);
  };

  // Load integration transports
  try {
    const result = await IntegrationLoader.loadIntegrationTransports(
      sessionId,
      userId,
      workspaceId,
      // Empty list means "load all" by passing undefined.
      integrations.length > 0 ? integrations : undefined,
    );
    logger.log(
      `Loaded ${result.loaded} integration transports for session ${sessionId}`,
    );
    if (result.failed.length > 0) {
      // Partial failure is tolerated; successful integrations still work.
      logger.warn(`Failed to load some integrations: ${result.failed}`);
    }
  } catch (error) {
    logger.error(`Error loading integration transports: ${error}`);
  }

  // Create and connect MCP server
  const server = await createMcpServer(userId, sessionId);
  await server.connect(transport);

  return transport;
}
// Create MCP server with memory tools + dynamic integration tools
/**
 * Builds the unified MCP server for one session: tool listing merges the
 * static memory tools with whatever integration tools are currently loaded,
 * and tool calls are routed by name prefix ("memory_" vs. integration slug).
 *
 * Fix: the original registered the CallToolRequestSchema handler twice with
 * identical bodies; the second registration silently replaced the first, so
 * the duplicate has been removed.
 */
async function createMcpServer(userId: string, sessionId: string) {
  const server = new Server(
    {
      name: "core-unified-mcp-server",
      version: "1.0.0",
    },
    {
      capabilities: {
        tools: {},
      },
    },
  );

  // Dynamic tool listing that includes integration tools
  server.setRequestHandler(ListToolsRequestSchema, async () => {
    // Get integration tools; a loader failure degrades to memory tools only.
    let integrationTools: any[] = [];
    try {
      integrationTools =
        await IntegrationLoader.getAllIntegrationTools(sessionId);
    } catch (error) {
      logger.error(`Error loading integration tools: ${error}`);
    }

    return {
      tools: [...memoryTools, ...integrationTools],
    };
  });

  // Handle tool calls for both memory and integration tools
  server.setRequestHandler(CallToolRequestSchema, async (request) => {
    const { name, arguments: args } = request.params;

    // Handle memory tools
    if (name.startsWith("memory_")) {
      return await callMemoryTool(name, args, userId);
    }

    // Handle integration tools (prefixed with integration slug).
    // The memory_ prefix was already handled above, so only the underscore
    // check is needed here.
    if (name.includes("_")) {
      try {
        return await IntegrationLoader.callIntegrationTool(
          sessionId,
          name,
          args,
        );
      } catch (error) {
        return {
          content: [
            {
              type: "text",
              text: `Error calling integration tool: ${error instanceof Error ? error.message : String(error)}`,
            },
          ],
          isError: true,
        };
      }
    }

    throw new Error(`Unknown tool: ${name}`);
  });

  return server;
}
// Handle MCP requests
/**
 * Core POST dispatcher. Three paths:
 *  1. Known active session id -> reuse (or recreate) its transport.
 *  2. No session id + an MCP initialize request -> create a new session.
 *  3. Anything else -> JSON-RPC -32000 error (400).
 * All responses are JSON-RPC envelopes; transport-level handling is
 * delegated to handleTransport.
 */
const handleMCPRequest = async (
  request: Request,
  body: any,
  authentication: any,
  queryParams: z.infer<typeof QueryParams>,
) => {
  const sessionId = request.headers.get("mcp-session-id") as string | undefined;
  const source = queryParams.source || "api";
  // "a,b , c" -> ["a", "b", "c"]
  const integrations = queryParams.integrations
    ? queryParams.integrations.split(",").map((s) => s.trim())
    : [];

  const userId = authentication.userId;
  const workspaceId = authentication.workspaceId;

  try {
    let transport: StreamableHTTPServerTransport;
    // NOTE(review): currentSessionId is assigned for new sessions but never
    // returned to the client here — presumably the transport emits the
    // session id header itself; confirm against the SDK.
    let currentSessionId = sessionId;

    if (sessionId && (await MCPSessionManager.isSessionActive(sessionId))) {
      // Use existing session
      const sessionData = TransportManager.getSessionInfo(sessionId);
      if (!sessionData.exists) {
        // Session exists in DB but not in memory, recreate transport
        // (e.g. after a server restart).
        logger.log(`Recreating transport for session ${sessionId}`);
        const sessionDetails = await MCPSessionManager.getSession(sessionId);
        if (sessionDetails) {
          transport = await createTransport(
            sessionId,
            sessionDetails.source,
            sessionDetails.integrations,
            userId,
            workspaceId,
          );
        } else {
          // Active per isSessionActive but no row on fetch — treat as fatal.
          throw new Error("Session not found in database");
        }
      } else {
        transport = sessionData.mainTransport as StreamableHTTPServerTransport;
      }
    } else if (!sessionId && isInitializeRequest(body)) {
      // New initialization request
      currentSessionId = randomUUID();
      transport = await createTransport(
        currentSessionId,
        source,
        integrations,
        userId,
        workspaceId,
      );
    } else {
      // Invalid request
      return json(
        {
          jsonrpc: "2.0",
          error: {
            code: -32000,
            message:
              "Bad Request: No valid session ID provided or session inactive",
          },
          id: body?.id || null,
        },
        { status: 400 },
      );
    }

    // Handle the request through existing transport utility
    const response = await handleTransport(transport!, request, body);
    return response;
  } catch (error) {
    console.error("MCP SSE request error:", error);
    return json(
      {
        jsonrpc: "2.0",
        error: {
          code: -32000,
          message:
            error instanceof Error ? error.message : "Internal server error",
        },
        id: body?.id || null,
      },
      { status: 500 },
    );
  }
};
/**
 * DELETE handler: tears down the MCP session named by the
 * `mcp-session-id` header — removes the DB record, then releases
 * any in-memory transports. 204 on success, 400 without a header,
 * 500 on any failure.
 */
const handleDelete = async (request: Request) => {
  const id = request.headers.get("mcp-session-id") as string | undefined;
  if (!id) return new Response("Missing session ID", { status: 400 });

  try {
    await MCPSessionManager.deleteSession(id);
    await TransportManager.cleanupSession(id);
    return new Response(null, { status: 204 });
  } catch (error) {
    console.error("Error deleting session:", error);
    return new Response("Internal server error", { status: 500 });
  }
};
// POST carries JSON-RPC MCP traffic; DELETE performs explicit session
// teardown; all other verbs get a JSON-RPC "method not found"-style error.
const { action } = createHybridActionApiRoute(
  {
    body: MCPRequestSchema,
    searchParams: QueryParams,
    allowJWT: true,
    authorization: {
      action: "mcp",
    },
    corsStrategy: "all",
  },
  async ({ body, authentication, request, searchParams }) => {
    const method = request.method;

    if (method === "POST") {
      return await handleMCPRequest(
        request,
        body,
        authentication,
        searchParams,
      );
    } else if (method === "DELETE") {
      return await handleDelete(request);
    } else {
      return json(
        {
          jsonrpc: "2.0",
          error: {
            code: -32601,
            message: "Method not allowed",
          },
          id: null,
        },
        { status: 405 },
      );
    }
  },
);
// GET/SSE entry point: currently returns session metadata rather than a
// live SSE stream (see inline note below).
const loader = createLoaderApiRoute(
  {
    allowJWT: true,
    corsStrategy: "all",
    // No per-resource lookup; any authenticated request resolves.
    findResource: async () => 1,
  },
  async ({ request }) => {
    // Handle SSE requests (for server-to-client notifications)
    const sessionId = request.headers.get("mcp-session-id");

    if (!sessionId) {
      return new Response("Missing session ID for SSE", { status: 400 });
    }

    const sessionData = TransportManager.getSessionInfo(sessionId);
    if (!sessionData.exists) {
      // Check if session exists in database and recreate transport
      const sessionDetails = await MCPSessionManager.getSession(sessionId);
      if (!sessionDetails) {
        return new Response("Session not found", { status: 404 });
      }
      // Session exists in DB but not in memory - need authentication to recreate
      // Deliberately returns 404 in both branches: the GET path cannot
      // rebuild a transport, so a DB-only session is unusable here.
      return new Response("Session not found", { status: 404 });
    }

    // Return SSE stream (this would be handled by the transport's handleRequest method)
    // For now, just return session info
    return json({
      sessionId,
      active: await MCPSessionManager.isSessionActive(sessionId),
      integrationCount: sessionData.integrationCount,
      createdAt: sessionData.createdAt,
    });
  },
);
export { action, loader };

View File

@ -7,6 +7,8 @@ import { SpaceFactsFilters } from "~/components/spaces/space-facts-filters";
import { SpaceFactsList } from "~/components/spaces/space-facts-list";
import type { StatementNode } from "@core/types";
import { ClientOnly } from "remix-utils/client-only";
import { LoaderCircle } from "lucide-react";
export async function loader({ request, params }: LoaderFunctionArgs) {
const userId = await requireUserId(request);
@ -88,12 +90,18 @@ export default function Facts() {
/>
<div className="flex h-[calc(100vh_-_140px)] w-full">
<SpaceFactsList
facts={filteredStatements}
hasMore={false} // TODO: Implement real pagination
loadMore={loadMore}
isLoading={false}
/>
<ClientOnly
fallback={<LoaderCircle className="mr-2 h-4 w-4 animate-spin" />}
>
{() => (
<SpaceFactsList
facts={filteredStatements}
hasMore={false} // TODO: Implement real pagination
loadMore={loadMore}
isLoading={false}
/>
)}
</ClientOnly>
</div>
</div>
);

View File

@ -5,7 +5,13 @@ import {
CollapsibleTrigger,
} from "~/components/ui/collapsible";
import { Button } from "~/components/ui";
import { Activity, AlertCircle, ChevronDown, Clock } from "lucide-react";
import {
Activity,
AlertCircle,
ChevronDown,
Clock,
LoaderCircle,
} from "lucide-react";
import React from "react";
import {
Popover,
@ -14,12 +20,17 @@ import {
} from "~/components/ui/popover";
import { getIcon, IconPicker } from "~/components/icon-picker";
import { SpaceSummary } from "~/components/spaces/space-summary.client";
import { type LoaderFunctionArgs } from "@remix-run/server-runtime";
import {
type ActionFunctionArgs,
redirect,
type LoaderFunctionArgs,
} from "@remix-run/server-runtime";
import { requireUserId } from "~/services/session.server";
import { SpaceService } from "~/services/space.server";
import { useTypedLoaderData } from "remix-typedjson";
import { useFetcher } from "@remix-run/react";
import { Badge } from "~/components/ui/badge";
import { ClientOnly } from "remix-utils/client-only";
export async function loader({ request, params }: LoaderFunctionArgs) {
const userId = await requireUserId(request);
@ -32,6 +43,27 @@ export async function loader({ request, params }: LoaderFunctionArgs) {
return space;
}
export async function action({ request, params }: ActionFunctionArgs) {
const userId = await requireUserId(request);
const spaceService = new SpaceService();
const spaceId = params.spaceId;
if (!spaceId) {
throw new Error("Space ID is required");
}
const formData = await request.formData();
const icon = formData.get("icon");
if (typeof icon !== "string") {
throw new Error("Invalid icon data");
}
await spaceService.updateSpace(spaceId, { icon }, userId);
return redirect(`/home/space/${spaceId}/overview`);
}
// Helper function to get status display info
function getStatusDisplay(status?: string | null) {
switch (status) {
@ -126,7 +158,11 @@ export default function Overview() {
</CollapsibleTrigger>
<CollapsibleContent>
<div className="text-md">
<SpaceSummary summary={space.summary} />
<ClientOnly
fallback={<LoaderCircle className="mr-2 h-4 w-4 animate-spin" />}
>
{() => <SpaceSummary summary={space.summary} />}
</ClientOnly>
</div>
</CollapsibleContent>
</Collapsible>

View File

@ -1,15 +1,12 @@
import { PageHeader } from "~/components/common/page-header";
import {
type LoaderFunctionArgs,
type ActionFunctionArgs,
redirect,
} from "@remix-run/server-runtime";
import { type LoaderFunctionArgs } from "@remix-run/server-runtime";
import { requireUserId } from "~/services/session.server";
import { ClientOnly } from "remix-utils/client-only";
import { SpaceService } from "~/services/space.server";
import { useTypedLoaderData } from "remix-typedjson";
import { Outlet, useLocation, useNavigate } from "@remix-run/react";
import { SpaceOptions } from "~/components/spaces/space-options";
import { LoaderCircle } from "lucide-react";
export async function loader({ request, params }: LoaderFunctionArgs) {
const userId = await requireUserId(request);
@ -22,27 +19,6 @@ export async function loader({ request, params }: LoaderFunctionArgs) {
return space;
}
export async function action({ request, params }: ActionFunctionArgs) {
const userId = await requireUserId(request);
const spaceService = new SpaceService();
const spaceId = params.spaceId;
if (!spaceId) {
throw new Error("Space ID is required");
}
const formData = await request.formData();
const icon = formData.get("icon");
if (typeof icon !== "string") {
throw new Error("Invalid icon data");
}
await spaceService.updateSpace(spaceId, { icon }, userId);
return redirect(`/home/space/${spaceId}`);
}
export default function Space() {
const space = useTypedLoaderData<typeof loader>();
const location = useLocation();
@ -77,11 +53,21 @@ export default function Space() {
},
]}
actionsNode={
<SpaceOptions
id={space.id as string}
name={space.name}
description={space.description}
/>
<ClientOnly
fallback={
<div>
<LoaderCircle className="mr-2 h-4 w-4 animate-spin" />
</div>
}
>
{() => (
<SpaceOptions
id={space.id as string}
name={space.name}
description={space.description}
/>
)}
</ClientOnly>
}
/>
<div className="relative flex h-[calc(100vh_-_56px)] w-full flex-col items-center justify-start overflow-auto">

View File

@ -15,7 +15,6 @@ import { Card, CardContent } from "~/components/ui/card";
import Logo from "~/components/logo/logo";
import {
AlignLeft,
LayoutGrid,
Pen,
User,
Mail,
@ -25,6 +24,7 @@ import {
ArrowRightLeft,
} from "lucide-react";
import { useState } from "react";
import { getIconForAuthorise } from "~/components/icon-utils";
export const loader = async ({ request }: LoaderFunctionArgs) => {
// Check if user is authenticated
@ -221,6 +221,8 @@ export default function OAuthAuthorize() {
return "Read access to your account";
case "write":
return "Write access to your account";
case "mcp":
return "Access to memory and integrations";
default:
return `Access to ${scope}`;
}
@ -231,15 +233,7 @@ export default function OAuthAuthorize() {
<Card className="bg-background-3 shadow-1 w-full max-w-md rounded-lg p-5">
<CardContent>
<div className="flex items-center justify-center gap-4">
{client.logoUrl ? (
<img
src={client.logoUrl}
alt={client.name}
className="h-[40px] w-[40px] rounded"
/>
) : (
<LayoutGrid size={40} />
)}
{getIconForAuthorise(client.name, client.logoUrl)}
<ArrowRightLeft size={16} />
<Logo width={40} height={40} />
</div>

View File

@ -0,0 +1,226 @@
import { prisma } from "~/db.server";
import { TransportManager } from "./transport-manager";
/**
 * An integration account row joined with the subset of its integration
 * definition that the loader needs (see the `select` in
 * getConnectedIntegrationAccounts).
 */
export interface IntegrationAccountWithDefinition {
  id: string;
  integrationDefinitionId: string;
  accountId: string | null;
  // Opaque per-account configuration; may carry OAuth tokens under
  // `mcp.tokens` (read by loadIntegrationTransports).
  integrationConfiguration: any;
  isActive: boolean;
  // Joined definition fields; `spec` may contain an `mcp` block with
  // `type` and `url` for MCP-enabled integrations.
  integrationDefinition: {
    id: string;
    name: string;
    slug: string;
    spec: any;
  };
}
/**
 * Loads and manages integration accounts for MCP sessions: discovers
 * connected accounts, wires up MCP transports for them, and routes
 * namespaced tool calls to the right integration.
 */
export class IntegrationLoader {
  /**
   * Get all connected and active integration accounts for a user/workspace,
   * optionally filtered by integration slugs.
   */
  static async getConnectedIntegrationAccounts(
    userId: string,
    workspaceId: string,
    integrationSlugs?: string[],
  ): Promise<IntegrationAccountWithDefinition[]> {
    const whereClause: any = {
      integratedById: userId,
      workspaceId: workspaceId,
      isActive: true,
      deleted: null, // exclude soft-deleted accounts
    };

    // Filter by integration slugs if provided
    if (integrationSlugs && integrationSlugs.length > 0) {
      whereClause.integrationDefinition = {
        slug: {
          in: integrationSlugs,
        },
      };
    }

    const integrationAccounts = await prisma.integrationAccount.findMany({
      where: whereClause,
      include: {
        integrationDefinition: {
          select: {
            id: true,
            name: true,
            slug: true,
            spec: true,
          },
        },
      },
    });

    return integrationAccounts;
  }

  /**
   * Get integration accounts whose definition spec carries a usable MCP
   * configuration (both a transport `type` and a `url`).
   */
  static async getMcpEnabledIntegrationAccounts(
    userId: string,
    workspaceId: string,
    integrationSlugs?: string[],
  ): Promise<IntegrationAccountWithDefinition[]> {
    const accounts = await this.getConnectedIntegrationAccounts(
      userId,
      workspaceId,
      integrationSlugs,
    );

    // Filter for accounts with MCP configuration
    return accounts.filter((account) => {
      const spec = account.integrationDefinition.spec;
      return spec && spec.mcp && spec.mcp.type && spec.mcp.url;
    });
  }

  /**
   * Create transports for every MCP-enabled account of the user and attach
   * them to the session. Failures are collected per integration rather than
   * aborting the whole load.
   *
   * @returns counts of successfully loaded transports and per-slug failures
   */
  static async loadIntegrationTransports(
    sessionId: string,
    userId: string,
    workspaceId: string,
    integrationSlugs?: string[],
  ): Promise<{
    loaded: number;
    failed: Array<{ slug: string; error: string }>;
  }> {
    const accounts = await this.getMcpEnabledIntegrationAccounts(
      userId,
      workspaceId,
      integrationSlugs,
    );

    let loaded = 0;
    const failed: Array<{ slug: string; error: string }> = [];

    for (const account of accounts) {
      try {
        const spec = account.integrationDefinition.spec;
        const mcpConfig = spec.mcp;

        if (mcpConfig.type === "http") {
          // Get access token from integration configuration, if one was
          // stored during the OAuth flow.
          let accessToken: string | undefined;
          const integrationConfig = account.integrationConfiguration as any;
          if (
            integrationConfig &&
            integrationConfig.mcp &&
            integrationConfig.mcp.tokens
          ) {
            accessToken = integrationConfig.mcp.tokens.access_token;
          }

          // Create HTTP transport for this integration
          await TransportManager.addIntegrationTransport(
            sessionId,
            account.id,
            account.integrationDefinition.slug,
            mcpConfig.url,
            accessToken,
          );
          loaded++;
        } else {
          // Skip non-HTTP transports for now
          failed.push({
            slug: account.integrationDefinition.slug,
            error: `Unsupported transport type: ${mcpConfig.type}`,
          });
        }
      } catch (error) {
        failed.push({
          slug: account.integrationDefinition.slug,
          error: error instanceof Error ? error.message : "Unknown error",
        });
      }
    }

    return { loaded, failed };
  }

  /**
   * Collect tools from all connected integration transports of a session.
   * Tool names are namespaced as `${slug}_${name}` to avoid collisions;
   * the original name is preserved in `_integration.originalName`.
   */
  static async getAllIntegrationTools(sessionId: string) {
    const integrationTransports =
      TransportManager.getSessionIntegrationTransports(sessionId);
    const allTools: any[] = [];

    for (const integrationTransport of integrationTransports) {
      try {
        const result = await integrationTransport.client.listTools();

        if (result.tools && Array.isArray(result.tools)) {
          // Prefix tool names with integration slug to avoid conflicts
          const prefixedTools = result.tools.map((tool: any) => ({
            ...tool,
            name: `${integrationTransport.slug}_${tool.name}`,
            description: `[${integrationTransport.slug}] ${tool.description || tool.name}`,
            _integration: {
              slug: integrationTransport.slug,
              accountId: integrationTransport.integrationAccountId,
              originalName: tool.name,
            },
          }));
          allTools.push(...prefixedTools);
        }
      } catch (error) {
        // One failing integration must not hide the others' tools.
        console.error(
          `Failed to get tools from integration ${integrationTransport.slug}:`,
          error,
        );
      }
    }

    return allTools;
  }

  /**
   * Call a tool on a specific integration.
   *
   * Tool names are prefixed as `${slug}_${originalName}` by
   * getAllIntegrationTools. Because a slug may itself contain underscores,
   * we resolve the integration by matching the prefix against the slugs of
   * the session's connected transports instead of splitting on the first
   * underscore (which would mis-parse e.g. `my_tool_list` for slug
   * `my_tool`).
   */
  static async callIntegrationTool(
    sessionId: string,
    toolName: string,
    args: any,
  ): Promise<any> {
    const integrationTransports =
      TransportManager.getSessionIntegrationTransports(sessionId);

    // Find the transport whose slug (plus separator) prefixes the tool name.
    const integrationTransport = integrationTransports.find((t) =>
      toolName.startsWith(`${t.slug}_`),
    );

    if (!integrationTransport) {
      // Preserve the original error paths: a name with no underscore is
      // malformed; otherwise report the unresolved integration slug.
      const parts = toolName.split("_");
      if (parts.length < 2) {
        throw new Error("Invalid tool name format");
      }
      throw new Error(
        `Integration ${parts[0]} not found or not connected`,
      );
    }

    // Strip `${slug}_` to recover the integration's own tool name.
    const originalToolName = toolName.slice(
      integrationTransport.slug.length + 1,
    );

    // Call the tool
    return await integrationTransport.client.callTool({
      name: originalToolName,
      arguments: args,
    });
  }
}

View File

@ -0,0 +1,186 @@
import { addToQueue } from "~/lib/ingest.server";
import { logger } from "~/services/logger.service";
import { SearchService } from "~/services/search.server";
import { SpaceService } from "~/services/space.server";
// Shared service singletons reused across all memory tool invocations.
const searchService = new SearchService();
const spaceService = new SpaceService();

// Memory tool schemas (from existing memory endpoint)
// JSON-Schema-style parameter description for memory_search.
const SearchParamsSchema = {
  type: "object",
  properties: {
    query: {
      type: "string",
      description: "The search query in third person perspective",
    },
    validAt: {
      type: "string",
      description: "The valid at time in ISO format",
    },
    startTime: {
      type: "string",
      description: "The start time in ISO format",
    },
    endTime: {
      type: "string",
      description: "The end time in ISO format",
    },
    spaceIds: {
      type: "array",
      items: {
        type: "string",
      },
      description: "Array of strings representing UUIDs of spaces",
    },
  },
  required: ["query"],
};

// JSON-Schema-style parameter description for memory_ingest.
const IngestSchema = {
  type: "object",
  properties: {
    message: {
      type: "string",
      description: "The data to ingest in text format",
    },
  },
  required: ["message"],
};

// Tool manifest advertised to MCP clients.
// NOTE(review): callMemoryTool also handles "memory_get_spaces", but that
// tool is not listed here — confirm whether it should be advertised.
export const memoryTools = [
  {
    name: "memory_ingest",
    description: "Ingest data into the Echo memory system",
    inputSchema: IngestSchema,
  },
  {
    name: "memory_search",
    description: "Search through ingested memory data",
    inputSchema: SearchParamsSchema,
  },
];
// Dispatches a memory tool invocation to its handler by tool name,
// injecting the caller's userId. Any failure (including an unknown tool)
// is converted to an MCP-style error payload rather than thrown.
export async function callMemoryTool(
  toolName: string,
  args: any,
  userId: string,
) {
  try {
    if (toolName === "memory_ingest") {
      return await handleMemoryIngest({ ...args, userId });
    }
    if (toolName === "memory_search") {
      return await handleMemorySearch({ ...args, userId });
    }
    if (toolName === "memory_get_spaces") {
      return await handleMemoryGetSpaces(userId);
    }
    throw new Error(`Unknown memory tool: ${toolName}`);
  } catch (error) {
    console.error(`Error calling memory tool ${toolName}:`, error);
    const text = `Error calling memory tool: ${error instanceof Error ? error.message : String(error)}`;
    return {
      content: [{ type: "text", text }],
      isError: true,
    };
  }
}
// Handler for memory_ingest
// Queues the message for ingestion and returns the queue response as an
// MCP text payload; errors are logged and returned with isError: true.
async function handleMemoryIngest(args: any) {
  try {
    // NOTE(review): addToQueue is not awaited here — if it returns a
    // promise, JSON.stringify(response) below serializes the pending
    // promise ({}), not the result. Confirm addToQueue is synchronous.
    const response = addToQueue(
      {
        episodeBody: args.message,
        referenceTime: new Date().toISOString(),
        // NOTE(review): args.source is not declared in IngestSchema —
        // verify callers actually pass it, otherwise this is undefined.
        source: args.source,
      },
      args.userId,
    );

    return {
      content: [
        {
          type: "text",
          text: JSON.stringify(response),
        },
      ],
    };
  } catch (error) {
    logger.error(`MCP memory ingest error: ${error}`);
    return {
      content: [
        {
          type: "text",
          text: `Error ingesting data: ${error instanceof Error ? error.message : String(error)}`,
        },
      ],
      isError: true,
    };
  }
}
// Handler for memory_search
// Runs a memory search scoped to the user, with optional time-range
// filtering, and returns the results as an MCP text payload.
// NOTE(review): SearchParamsSchema also declares `validAt` and `spaceIds`,
// but neither is forwarded to searchService.search here — confirm whether
// they should be.
async function handleMemorySearch(args: any) {
  try {
    const results = await searchService.search(args.query, args.userId, {
      startTime: args.startTime ? new Date(args.startTime) : undefined,
      endTime: args.endTime ? new Date(args.endTime) : undefined,
    });

    return {
      content: [
        {
          type: "text",
          text: JSON.stringify(results),
        },
      ],
    };
  } catch (error) {
    logger.error(`MCP memory search error: ${error}`);
    return {
      content: [
        {
          type: "text",
          text: `Error searching memory: ${error instanceof Error ? error.message : String(error)}`,
        },
      ],
      isError: true,
    };
  }
}
// Handler for memory_get_spaces
// Lists the user's spaces and returns them as an MCP text payload;
// failures are logged and reported with isError: true.
async function handleMemoryGetSpaces(userId: string) {
  try {
    const spaces = await spaceService.getUserSpaces(userId);
    const payload = { type: "text", text: JSON.stringify(spaces) };
    return { content: [payload], isError: false };
  } catch (error) {
    logger.error(`MCP get spaces error: ${error}`);
    const message = error instanceof Error ? error.message : String(error);
    return {
      content: [{ type: "text", text: `Error getting spaces: ${message}` }],
      isError: true,
    };
  }
}

View File

@ -0,0 +1,133 @@
import { prisma } from "~/db.server";
/**
 * Plain DTO for an MCP session row as exposed by MCPSessionManager.
 */
export interface MCPSessionData {
  id: string;
  // Where the session originated (e.g. client identifier) — stored verbatim.
  source: string;
  // Slugs of integrations requested for this session.
  integrations: string[];
  createdAt: Date;
  // Soft-delete timestamp; undefined while the session is active.
  deleted?: Date;
}
/**
 * Persistence layer for MCP sessions. Sessions are soft-deleted (a
 * `deleted` timestamp is set) rather than removed.
 */
export class MCPSessionManager {
  /**
   * Create or update an MCP session.
   *
   * Uses a single atomic `upsert` instead of the previous find-then-
   * create/update sequence, which could race (and fail on the unique id)
   * when two requests arrived for the same session concurrently.
   */
  static async upsertSession(
    sessionId: string,
    source: string,
    integrations: string[],
  ): Promise<MCPSessionData> {
    const session = await prisma.mCPSession.upsert({
      where: { id: sessionId },
      update: { source, integrations },
      create: { id: sessionId, source, integrations },
    });

    return {
      id: session.id,
      source: session.source,
      integrations: session.integrations,
      createdAt: session.createdAt,
      deleted: session.deleted || undefined,
    };
  }

  /**
   * Soft-delete a session by stamping its `deleted` column.
   */
  static async deleteSession(sessionId: string): Promise<void> {
    await prisma.mCPSession.update({
      where: { id: sessionId },
      data: {
        deleted: new Date(),
      },
    });
  }

  /**
   * Get session data, or null when no session with this id exists.
   * Note: soft-deleted sessions are still returned (with `deleted` set).
   */
  static async getSession(sessionId: string): Promise<MCPSessionData | null> {
    const session = await prisma.mCPSession.findUnique({
      where: { id: sessionId },
    });

    if (!session) return null;

    return {
      id: session.id,
      source: session.source,
      integrations: session.integrations,
      createdAt: session.createdAt,
      deleted: session.deleted || undefined,
    };
  }

  /**
   * Get all active sessions (not soft-deleted).
   */
  static async getActiveSessions(): Promise<MCPSessionData[]> {
    const sessions = await prisma.mCPSession.findMany({
      where: {
        deleted: null,
      },
    });

    return sessions.map((session) => ({
      id: session.id,
      source: session.source,
      integrations: session.integrations,
      createdAt: session.createdAt,
    }));
  }

  /**
   * Soft-delete sessions created more than 24 hours ago.
   *
   * @returns the number of sessions marked deleted
   */
  static async cleanupOldSessions(): Promise<number> {
    const twentyFourHoursAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);

    const result = await prisma.mCPSession.updateMany({
      where: {
        createdAt: { lt: twentyFourHoursAgo },
        deleted: null,
      },
      data: {
        deleted: new Date(),
      },
    });

    return result.count;
  }

  /**
   * Check whether a session exists and has not been soft-deleted.
   */
  static async isSessionActive(sessionId: string): Promise<boolean> {
    const session = await prisma.mCPSession.findUnique({
      where: { id: sessionId },
      select: { deleted: true },
    });

    return session ? !session.deleted : false;
  }
}

View File

@ -0,0 +1,231 @@
import { type StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js";
import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";
import { Client as McpClient } from "@modelcontextprotocol/sdk/client/index.js";
/**
 * A live MCP client connection to one integration's remote server.
 */
export interface IntegrationTransport {
  client: McpClient; // connected MCP client for this integration
  transport: StreamableHTTPClientTransport; // underlying HTTP transport
  integrationAccountId: string; // IntegrationAccount id; also the map key
  slug: string; // integration definition slug
  url: string; // remote MCP endpoint URL
}

/**
 * Per-session transport bookkeeping: the optional main server-side
 * transport plus one client transport per connected integration.
 */
export interface SessionTransports {
  mainTransport?: StreamableHTTPServerTransport; // set via setMainTransport
  integrationTransports: Map<string, IntegrationTransport>; // keyed by integrationAccountId
  createdAt: number; // epoch ms; used for age-based cleanup
}
/**
 * Manages MCP transports for sessions and integrations.
 *
 * State is held in a process-local static Map, so it is scoped to this
 * server instance and lost on restart.
 */
export class TransportManager {
  // sessionId -> transports for that session
  private static transports = new Map<string, SessionTransports>();

  /**
   * Create or get session transports, initializing an empty record on
   * first access.
   */
  static getOrCreateSession(sessionId: string): SessionTransports {
    let session = this.transports.get(sessionId);

    if (!session) {
      session = {
        integrationTransports: new Map(),
        createdAt: Date.now(),
      };
      this.transports.set(sessionId, session);
    }

    return session;
  }

  /**
   * Set the main server transport for a session.
   *
   * NOTE(review): this assigns transport.onclose unconditionally,
   * overwriting any handler set elsewhere, and the promise returned by
   * cleanupSession is ignored — confirm both are intended.
   */
  static setMainTransport(
    sessionId: string,
    transport: StreamableHTTPServerTransport,
  ): void {
    const session = this.getOrCreateSession(sessionId);
    session.mainTransport = transport;

    // Setup cleanup on transport close
    transport.onclose = () => {
      this.cleanupSession(sessionId);
    };
  }

  /**
   * Add an integration transport to a session: creates an HTTP client
   * transport (with optional Bearer auth), connects an MCP client over it,
   * and registers the pair under the integration account id.
   *
   * Replaces any existing entry for the same account id without closing it.
   */
  static async addIntegrationTransport(
    sessionId: string,
    integrationAccountId: string,
    slug: string,
    url: string,
    accessToken?: string,
  ): Promise<IntegrationTransport> {
    const session = this.getOrCreateSession(sessionId);

    // Create HTTP transport for the integration
    const transport = new StreamableHTTPClientTransport(new URL(url), {
      requestInit: {
        headers: accessToken
          ? {
              Authorization: `Bearer ${accessToken}`,
            }
          : {},
      },
    });

    // Create MCP client
    const client = new McpClient({
      name: `core-client-${slug}`,
      version: "1.0.0",
    });

    // Connect client to transport
    await client.connect(transport);

    const integrationTransport: IntegrationTransport = {
      client,
      transport,
      integrationAccountId,
      slug,
      url,
    };

    session.integrationTransports.set(
      integrationAccountId,
      integrationTransport,
    );

    return integrationTransport;
  }

  /**
   * Get integration transport by account ID, or undefined when the session
   * or the transport does not exist.
   */
  static getIntegrationTransport(
    sessionId: string,
    integrationAccountId: string,
  ): IntegrationTransport | undefined {
    const session = this.transports.get(sessionId);
    return session?.integrationTransports.get(integrationAccountId);
  }

  /**
   * Get all integration transports for a session (empty array when the
   * session is unknown).
   */
  static getSessionIntegrationTransports(
    sessionId: string,
  ): IntegrationTransport[] {
    const session = this.transports.get(sessionId);
    return session ? Array.from(session.integrationTransports.values()) : [];
  }

  /**
   * Close and remove a single integration transport from a session.
   * No-op when the session or transport does not exist.
   */
  static async removeIntegrationTransport(
    sessionId: string,
    integrationAccountId: string,
  ): Promise<void> {
    const session = this.transports.get(sessionId);
    if (!session) return;

    const integrationTransport =
      session.integrationTransports.get(integrationAccountId);
    if (integrationTransport) {
      // Close the transport
      await integrationTransport.transport.close();

      // Remove from map
      session.integrationTransports.delete(integrationAccountId);
    }
  }

  /**
   * Clean up entire session and all its transports. Close failures are
   * logged and do not stop the remaining cleanup.
   */
  static async cleanupSession(sessionId: string): Promise<void> {
    const session = this.transports.get(sessionId);
    if (!session) return;

    // Close all integration transports
    for (const [
      accountId,
      integrationTransport,
    ] of session.integrationTransports) {
      try {
        await integrationTransport.transport.close();
      } catch (error) {
        console.error(
          `Error closing integration transport ${accountId}:`,
          error,
        );
      }
    }

    // Close main transport if exists
    // NOTE(review): close() is not awaited here — if it is async, a
    // rejection escapes this try/catch; confirm intended.
    if (session.mainTransport) {
      try {
        session.mainTransport.close();
      } catch (error) {
        console.error(
          `Error closing main transport for session ${sessionId}:`,
          error,
        );
      }
    }

    // Remove from map
    this.transports.delete(sessionId);
  }

  /**
   * Get a summary of a session's transport state without mutating it.
   */
  static getSessionInfo(sessionId: string): {
    exists: boolean;
    integrationCount: number;
    createdAt?: number;
    mainTransport?: StreamableHTTPServerTransport;
  } {
    const session = this.transports.get(sessionId);

    return {
      exists: !!session,
      integrationCount: session?.integrationTransports.size || 0,
      createdAt: session?.createdAt,
      mainTransport: session?.mainTransport,
    };
  }

  /**
   * Clean up sessions older than maxAgeMs (default 24h).
   *
   * @returns the number of sessions cleaned up
   */
  static async cleanupOldSessions(
    maxAgeMs: number = 24 * 60 * 60 * 1000,
  ): Promise<number> {
    const now = Date.now();
    const sessionsToCleanup: string[] = [];

    // Collect first, then clean up, so we don't mutate the map mid-iteration.
    for (const [sessionId, session] of this.transports) {
      if (now - session.createdAt > maxAgeMs) {
        sessionsToCleanup.push(sessionId);
      }
    }

    for (const sessionId of sessionsToCleanup) {
      await this.cleanupSession(sessionId);
    }

    return sessionsToCleanup.length;
  }

  /**
   * Get the ids of all sessions currently tracked in memory.
   */
  static getActiveSessions(): string[] {
    return Array.from(this.transports.keys());
  }
}

View File

@ -64,6 +64,7 @@ model Conversation {
status String @default("pending") // Can be "pending", "running", "completed", "failed", "need_attention"
ConversationHistory ConversationHistory[]
RecallLog RecallLog[]
}
model ConversationExecutionStep {
@ -109,18 +110,6 @@ model ConversationHistory {
ConversationExecutionStep ConversationExecutionStep[]
}
model Entity {
id String @id @default(cuid())
name String @unique // e.g., "User", "Issue", "Task", "Automation"
metadata Json // Store field definitions and their types
// Relations
spaceEntities SpaceEntity[]
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
model IngestionQueue {
id String @id @default(cuid())
@ -287,6 +276,8 @@ model OAuthClient {
// PKCE support
requirePkce Boolean @default(false)
clientType String @default("regular")
// Client metadata
logoUrl String?
homepageUrl String?
@ -299,12 +290,12 @@ model OAuthClient {
isActive Boolean @default(true)
// Workspace relationship (like GitHub orgs)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
workspaceId String
workspace Workspace? @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
workspaceId String?
// Created by user (for audit trail)
createdBy User @relation(fields: [createdById], references: [id])
createdById String
createdBy User? @relation(fields: [createdById], references: [id])
createdById String?
// Relations
oauthAuthorizationCodes OAuthAuthorizationCode[]
@ -421,41 +412,97 @@ model PersonalAccessToken {
authorizationCodes AuthorizationCode[]
}
model Space {
id String @id @default(cuid())
name String
description String?
autoMode Boolean @default(false)
model RecallLog {
id String @id @default(uuid())
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
deleted DateTime?
// Access details
accessType String // "search", "recall", "direct_access"
query String? // Search query (null for direct access)
// Target information
targetType String? // "episode", "statement", "entity", "mixed_results"
targetId String? // UUID of specific target (null for search with multiple results)
// Search/access parameters
searchMethod String? // "semantic", "keyword", "hybrid", "contextual", "graph_traversal"
minSimilarity Float? // Minimum similarity threshold used
maxResults Int? // Maximum results requested
// Results and interaction
resultCount Int @default(0) // Number of results returned
similarityScore Float? // Similarity score (for single result access)
// Context and source
context String? // Additional context
source String? // Source of the access (e.g., "chat", "api", "integration")
sessionId String? // Session identifier
// Performance metrics
responseTimeMs Int? // Response time in milliseconds
// Relations
user User @relation(fields: [userId], references: [id])
userId String
// Space's enabled entities
enabledEntities SpaceEntity[]
workspace Workspace? @relation(fields: [workspaceId], references: [id])
workspaceId String?
conversation Conversation? @relation(fields: [conversationId], references: [id])
conversationId String?
// Metadata for additional tracking data
metadata Json? @default("{}")
}
model Space {
id String @id @default(cuid())
name String
description String?
autoMode Boolean @default(false)
summary String?
themes String[]
statementCount Int?
status String?
icon String?
lastPatternTrigger DateTime?
statementCountAtLastTrigger Int?
// Relations
workspace Workspace @relation(fields: [workspaceId], references: [id])
workspaceId String
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
IngestionQueue IngestionQueue[]
SpacePattern SpacePattern[]
}
model SpaceEntity {
id String @id @default(cuid())
model SpacePattern {
id String @id @default(cuid())
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
deleted DateTime?
name String
source String
type String
summary String
editedSummary String?
evidence String[]
confidence Float
userConfirmed String @default("pending")
// Relations
space Space @relation(fields: [spaceId], references: [id])
space Space @relation(fields: [spaceId], references: [id], onDelete: Cascade)
spaceId String
entity Entity @relation(fields: [entityId], references: [id])
entityId String
// Custom settings for this entity in this space
settings Json?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@unique([spaceId, entityId])
@@index([spaceId, source])
}
model User {
@ -480,13 +527,13 @@ model User {
marketingEmails Boolean @default(true)
confirmedBasicDetails Boolean @default(false)
onboardingComplete Boolean @default(false)
referralSource String?
personalAccessTokens PersonalAccessToken[]
InvitationCode InvitationCode? @relation(fields: [invitationCodeId], references: [id])
invitationCodeId String?
Space Space[]
Workspace Workspace?
IntegrationAccount IntegrationAccount[]
WebhookConfiguration WebhookConfiguration[]
@ -502,6 +549,7 @@ model User {
oauthIntegrationGrants OAuthIntegrationGrant[]
oAuthClientInstallation OAuthClientInstallation[]
UserUsage UserUsage?
RecallLog RecallLog[]
}
model UserUsage {
@ -576,6 +624,8 @@ model Workspace {
OAuthAuthorizationCode OAuthAuthorizationCode[]
OAuthAccessToken OAuthAccessToken[]
OAuthRefreshToken OAuthRefreshToken[]
RecallLog RecallLog[]
Space Space[]
}
enum AuthenticationMethod {

View File

@ -57,7 +57,7 @@ async function init() {
"refresh_token",
"client_credentials",
],
code_challenge_methods_supported: ["S256"],
code_challenge_methods_supported: ["S256", "plain"],
token_endpoint_auth_methods_supported: [
"client_secret_basic",
"none",

View File

@ -1,12 +1,9 @@
import { defineConfig } from "@trigger.dev/sdk/v3";
import {
additionalPackages,
syncEnvVars,
} from "@trigger.dev/build/extensions/core";
import { syncEnvVars } from "@trigger.dev/build/extensions/core";
import { prismaExtension } from "@trigger.dev/build/extensions/prisma";
export default defineConfig({
project: "proj_jqsgldpqilpdnvpzyzll",
project: process.env.TRIGGER_PROJECT_ID as string,
runtime: "node",
logLevel: "log",
// The max compute seconds a task is allowed to run. If the task run exceeds this duration, it will be stopped.

View File

@ -1,2 +0,0 @@
BASE_URL=https://core.heysol.ai
API_KEY=

View File

@ -1,253 +0,0 @@
#!/usr/bin/env node
const fs = require("fs");
const path = require("path");
const axios = require("axios");
/**
* LOCOMO Q&A Evaluation Script
* Evaluates question answering against ingested LOCOMO conversations
* Assumes conversations are already ingested via ingest_conversations.js
*/
class LocomoEvaluator {
  /**
   * @param {string} baseUrl - Base URL of the C.O.R.E API.
   *
   * The API token is read from the API_KEY environment variable
   * (consistent with ingest_conversations.js). A real bearer token was
   * previously hard-coded here — credentials must never be committed.
   */
  constructor(baseUrl = "http://localhost:3033") {
    this.baseUrl = baseUrl;
    this.headers = {
      Authorization: `Bearer ${process.env.API_KEY}`,
    };
    this.results = [];

    // Create axios instance with default config
    this.axios = axios.create({
      baseURL: this.baseUrl,
      headers: this.headers,
      timeout: 10000,
    });
  }

  /**
   * POST `data` to `endpoint`, normalizing axios failures into Errors with
   * a useful message.
   */
  async makeRequest(endpoint, data) {
    try {
      const response = await this.axios.post(endpoint, data, {
        headers: {
          "Content-Type": "application/json",
        },
      });
      return response.data;
    } catch (error) {
      if (error.response) {
        throw new Error(`HTTP ${error.response.status}: ${JSON.stringify(error.response.data)}`);
      } else if (error.request) {
        throw new Error(`No response received: ${error.message}`);
      } else {
        throw new Error(`Request error: ${error.message}`);
      }
    }
  }

  /**
   * Search memory for `question`. Returns `{ results: [] }` on failure so
   * evaluation can continue.
   * NOTE(review): conversationId is accepted but never sent to the API —
   * confirm whether the search endpoint supports scoping by conversation.
   */
  async searchMemory(question, conversationId = null) {
    try {
      const response = await this.makeRequest("/api/v1/search", {
        query: question,
        limit: 10,
      });
      return response;
    } catch (error) {
      console.error("Search error:", error.message);
      return { results: [] };
    }
  }

  /**
   * Evaluate one Q&A pair: retrieve context for the question and score it
   * with simple retrieval metrics (context present, answer substring match).
   */
  async evaluateQuestion(question, expectedAnswer, evidence, conversationId, category) {
    // Search for relevant context
    const searchResults = await this.searchMemory(question, conversationId);

    // Handle different API response formats
    const episodes = searchResults.episodes || searchResults.results || [];

    // Extract relevant context
    const context = episodes.map((episode) => {
      if (typeof episode === 'string') {
        return episode;
      }
      return episode.content || episode.text || episode;
    }).join("\n");

    // Basic relevance scoring
    const hasContext = episodes.length > 0;
    const contextLength = context.length;

    // Check if expected answer appears in context (simple matching)
    const answerInContext = context.toLowerCase().includes(expectedAnswer.toString().toLowerCase());

    return {
      question,
      expectedAnswer,
      evidence,
      category,
      searchContext: context,
      searchResultsCount: episodes.length,
      hasContext,
      contextLength,
      answerInContext,
      conversationId,
      facts: searchResults.facts || [],
    };
  }

  /**
   * Evaluate the Q&A pairs of one conversation.
   * NOTE(review): the `index === 0` guard limits evaluation to the FIRST
   * question only — this looks like leftover debug/smoke-test code; remove
   * it to evaluate the full set.
   */
  async evaluateConversation(conversation, conversationId) {
    console.log(`Evaluating conversation ${conversationId}...`);
    const qaResults = [];
    const totalQuestions = conversation.qa.length;

    for (const [index, qa] of conversation.qa.entries()) {
      if (index === 0) {
        try {
          const result = await this.evaluateQuestion(
            qa.question,
            qa.answer,
            qa.evidence,
            conversationId,
            qa.category
          );
          qaResults.push(result);

          // Progress indicator
          if ((index + 1) % 25 === 0) {
            console.log(`  Evaluated ${index + 1}/${totalQuestions} questions`);
          }

          // Small delay to avoid overwhelming the system
          await new Promise((resolve) => setTimeout(resolve, 25));
        } catch (error) {
          console.error(`Error evaluating question ${index}:`, error.message);
        }
      }
    }

    return qaResults;
  }

  /**
   * Load the LOCOMO dataset, evaluate it, then save and print results.
   * NOTE(review): the `i === 0` guard limits evaluation to the FIRST
   * conversation only — same leftover limiter as in evaluateConversation.
   */
  async runEvaluation() {
    console.log("Starting LOCOMO Q&A evaluation...");

    // Load LOCOMO dataset
    const dataPath = path.join(__dirname, "data", "locomo10.json");
    const conversations = JSON.parse(fs.readFileSync(dataPath, "utf8"));
    console.log(`Loaded ${conversations.length} conversations for evaluation`);

    // Evaluate each conversation
    for (let i = 0; i < conversations.length; i++) {
      const conversation = conversations[i];
      const conversationId = `locomo_${i + 1}`;
      if (i === 0) {
        try {
          const results = await this.evaluateConversation(conversation, conversationId);
          this.results.push({
            conversationId,
            results,
            totalQuestions: conversation.qa.length,
          });
        } catch (error) {
          console.error(`Error evaluating conversation ${conversationId}:`, error.message);
        }
      }
    }

    // Save and summarize results
    this.saveResults();
    this.printDetailedSummary();
  }

  /**
   * Write timestamped results and summary stats to evaluation_results.json.
   */
  saveResults() {
    const resultsPath = path.join(__dirname, "evaluation_results.json");
    const timestamp = new Date().toISOString();

    const output = {
      timestamp,
      summary: this.calculateSummaryStats(),
      conversations: this.results,
    };

    fs.writeFileSync(resultsPath, JSON.stringify(output, null, 2));
    console.log(`\nResults saved to ${resultsPath}`);
  }

  /**
   * Aggregate retrieval metrics over all evaluated conversations, overall
   * and per question category.
   */
  calculateSummaryStats() {
    const totalQuestions = this.results.reduce((sum, conv) => sum + conv.totalQuestions, 0);
    const questionsWithContext = this.results.reduce(
      (sum, conv) => sum + conv.results.filter((r) => r.hasContext).length,
      0
    );
    const questionsWithAnswerInContext = this.results.reduce(
      (sum, conv) => sum + conv.results.filter((r) => r.answerInContext).length,
      0
    );

    // Category breakdown
    const categoryStats = {};
    this.results.forEach((conv) => {
      conv.results.forEach((result) => {
        const cat = result.category || "unknown";
        if (!categoryStats[cat]) {
          categoryStats[cat] = { total: 0, withContext: 0, withAnswer: 0 };
        }
        categoryStats[cat].total++;
        if (result.hasContext) categoryStats[cat].withContext++;
        if (result.answerInContext) categoryStats[cat].withAnswer++;
      });
    });

    return {
      totalQuestions,
      questionsWithContext,
      questionsWithAnswerInContext,
      contextRetrievalRate: ((questionsWithContext / totalQuestions) * 100).toFixed(1),
      answerFoundRate: ((questionsWithAnswerInContext / totalQuestions) * 100).toFixed(1),
      categoryBreakdown: categoryStats,
    };
  }

  /**
   * Print a human-readable summary of the evaluation to stdout.
   * (Inner loop variable renamed from `stats` to `catStats` — it shadowed
   * the outer summary object.)
   */
  printDetailedSummary() {
    const stats = this.calculateSummaryStats();

    console.log("\n=== LOCOMO EVALUATION RESULTS ===");
    console.log(`Total conversations: ${this.results.length}`);
    console.log(`Total questions: ${stats.totalQuestions}`);
    console.log(
      `Questions with retrieved context: ${stats.questionsWithContext}/${stats.totalQuestions} (${stats.contextRetrievalRate}%)`
    );
    console.log(
      `Questions with answer in context: ${stats.questionsWithAnswerInContext}/${stats.totalQuestions} (${stats.answerFoundRate}%)`
    );

    console.log("\n=== CATEGORY BREAKDOWN ===");
    Object.entries(stats.categoryBreakdown).forEach(([category, catStats]) => {
      console.log(
        `Category ${category}: ${catStats.withAnswer}/${catStats.total} (${((catStats.withAnswer / catStats.total) * 100).toFixed(1)}%) answers found`
      );
    });

    console.log("\n=== PERFORMANCE INSIGHTS ===");
    const avgContextLength =
      this.results.reduce(
        (sum, conv) => sum + conv.results.reduce((s, r) => s + r.contextLength, 0),
        0
      ) / stats.totalQuestions;
    console.log(`Average context length: ${avgContextLength.toFixed(0)} characters`);

    console.log("\nNote: This evaluation measures retrieval performance. For accuracy scoring,");
    console.log("consider implementing LLM-based answer generation and comparison.");
  }
}
// Command line interface
// Runs the full evaluation when this file is executed directly
// (`node evaluate_qa.js`); otherwise the class is only exported for
// programmatic use.
if (require.main === module) {
  const evaluator = new LocomoEvaluator();
  evaluator.runEvaluation().catch(console.error);
}

module.exports = LocomoEvaluator;

View File

@ -1,44 +0,0 @@
{
"timestamp": "2025-08-11T15:08:14.955Z",
"summary": {
"totalQuestions": 199,
"questionsWithContext": 1,
"questionsWithAnswerInContext": 0,
"contextRetrievalRate": "0.5",
"answerFoundRate": "0.0",
"categoryBreakdown": {
"2": {
"total": 1,
"withContext": 1,
"withAnswer": 0
}
}
},
"conversations": [
{
"conversationId": "locomo_1",
"results": [
{
"question": "When did Caroline go to the LGBTQ support group?",
"expectedAnswer": "7 May 2023",
"evidence": [
"D1:3"
],
"category": 2,
"searchContext": "Caroline attended an LGBTQ support group, where she was inspired by transgender stories and felt accepted, which encouraged her to embrace herself. Caroline plans to continue her education and is interested in exploring career options in counseling or mental health. Melanie expressed appreciation for Caroline's empathy and shared that she painted a lake sunrise last year as a form of self-expression. Caroline complimented Melanie's painting, and both agreed that painting is a valuable outlet for relaxation and self-expression. Caroline intended to do research after the conversation, while Melanie planned to go swimming with her children. Both Caroline and Melanie emphasized the importance of self-care.\nCaroline informed Melanie on 20 July 2023 that she recently joined a new LGBTQ activist group called \"Connected LGBTQ Activists\" and is enjoying making a difference. Melanie expressed happiness for Caroline and showed interest in learning more about the group. Caroline explained that the group focuses on positive change and mutual support. Caroline participated in a recent pride parade in their city, which she described as a powerful reminder of the fight for equality. Melanie shared that she recently went to the beach with her children, which they enjoyed, and mentioned that her family's summer highlight is a camping trip where they witnessed the Perseid meteor shower, an experience that made her feel awe for the universe and appreciate life. Melanie also shared a special memory of her youngest child taking her first steps, which Caroline found sweet and reflective of the special bonds in families. Melanie expressed gratitude for her family, and Caroline praised her for having an awesome family.\nOn August 14, 2023, Melanie and Caroline discussed their recent experiences, with Melanie sharing her enjoyment of a concert for her daughter's birthday and Caroline describing her attendance at an advocacy event focused on love and support. 
Melanie inquired about Caroline's pride parade experience, leading Caroline to express pride in being part of the LGBTQ community and her commitment to fighting for equality. Melanie emphasized the importance of creating a loving and inclusive environment for their children. Caroline shared that she incorporates inclusivity and diversity into her artwork to advocate for LGBTQ+ acceptance, and that her art expresses her trans experience and aims to foster understanding of the trans community. Caroline shared a painting titled \"Embracing Identity,\" which represents self-acceptance and love, and explained that art has aided her self-discovery and acceptance. Melanie acknowledged the healing power of art and praised Caroline's work. Both Melanie and Caroline invited each other to reach out anytime, reinforcing a supportive relationship.",
"searchResultsCount": 3,
"hasContext": true,
"contextLength": 2810,
"answerInContext": false,
"conversationId": "locomo_1",
"facts": [
"The support group is associated with LGBTQ.",
"Caroline joined Connected LGBTQ Activists.",
"Caroline is a member of the LGBTQ community."
]
}
],
"totalQuestions": 199
}
]
}

View File

@ -1,298 +0,0 @@
#!/usr/bin/env node
const fs = require("fs");
const path = require("path");
const axios = require("axios");
/**
* LOCOMO Conversation Ingestion Script
* Ingests LOCOMO conversations into C.O.R.E memory system
* Tracks ingestion status to avoid duplicates
*/
/**
 * Ingests full LOCOMO benchmark conversations into the C.O.R.E memory
 * system via POSTs to /api/v1/add. Per-conversation progress is persisted
 * in ingestion_status.json next to this script so re-runs skip completed
 * conversations unless force re-ingestion is requested.
 */
class LocomoIngester {
  /**
   * @param {string} [baseUrl] - API root; defaults to the BASE_URL env var.
   *   Requests authenticate with a bearer token taken from API_KEY.
   */
  constructor(baseUrl = process.env.BASE_URL) {
    this.baseUrl = baseUrl;
    this.headers = {
      Authorization: `Bearer ${process.env.API_KEY}`,
    };
    // Status file lives alongside this script.
    this.statusFile = path.join(__dirname, "ingestion_status.json");
    // Create axios instance with default config
    this.axios = axios.create({
      baseURL: this.baseUrl,
      headers: this.headers,
      timeout: 10000, // 10 second timeout
    });
  }

  /**
   * POSTs `data` to `endpoint` and returns the response body.
   * Normalizes the three axios failure shapes (HTTP error status, no
   * response received, request setup error) into plain Errors.
   */
  async makeRequest(endpoint, data) {
    try {
      const response = await this.axios.post(endpoint, data, {
        headers: {
          "Content-Type": "application/json",
        },
      });
      return response.data;
    } catch (error) {
      if (error.response) {
        // Server responded with error status
        throw new Error(`HTTP ${error.response.status}: ${JSON.stringify(error.response.data)}`);
      } else if (error.request) {
        // Request was made but no response received
        throw new Error(`No response received: ${error.message}`);
      } else {
        // Something else happened
        throw new Error(`Request error: ${error.message}`);
      }
    }
  }

  // Reads the persisted status JSON; falls back to an empty status object
  // when the file is missing or unreadable.
  loadIngestionStatus() {
    try {
      if (fs.existsSync(this.statusFile)) {
        return JSON.parse(fs.readFileSync(this.statusFile, "utf8"));
      }
    } catch (error) {
      console.warn("Could not load ingestion status:", error.message);
    }
    return { conversations: {}, timestamp: null };
  }

  // Persists the status object (pretty-printed) to the status file.
  saveIngestionStatus(status) {
    fs.writeFileSync(this.statusFile, JSON.stringify(status, null, 2));
  }

  /**
   * Ingests one conversation as a series of dialog-turn episodes.
   * Skips conversations already marked ingested unless forceReingest is
   * true. Records success/error counts in the status file.
   * @returns {Promise<boolean>} true when episodes were sent, false when skipped.
   */
  async ingestConversation(conversation, conversationId, forceReingest = false) {
    const status = this.loadIngestionStatus();
    // Random session-id prefix so each run groups its episodes under
    // distinct session keys (suffixed with the session number below).
    const sessionId =
      Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15);
    if (status.conversations[conversationId] && !forceReingest) {
      console.log(`Conversation ${conversationId} already ingested, skipping...`);
      return false;
    }
    console.log(`Ingesting conversation ${conversationId}...`);
    const episodes = this.formatConversationForIngestion(conversation, conversationId);
    let successCount = 0;
    let errorCount = 0;
    for (const [index, episode] of episodes.entries()) {
      // Leftover debug limiter (kept commented out):
      // if (index >= 0 && index < 20) {
      try {
        const payload = {
          episodeBody: episode.content,
          referenceTime: episode.metadata.timestamp,
          source: "locomo_benchmark",
          sessionId: `${sessionId}-${episode.metadata.sessionNumber}`,
        };
        await this.makeRequest("/api/v1/add", payload);
        successCount++;
        // Progress indicator
        if ((index + 1) % 10 === 0) {
          console.log(` Ingested ${index + 1}/${episodes.length} episodes`);
        }
        // Small delay to avoid overwhelming the system
        await new Promise((resolve) => setTimeout(resolve, 50));
      } catch (error) {
        // Per-episode failures are counted but do not abort the run.
        console.error(` Error ingesting episode ${index}:`, error.message);
        errorCount++;
      }
      // }
    }
    // Update status
    status.conversations[conversationId] = {
      ingested: true,
      timestamp: new Date().toISOString(),
      totalEpisodes: episodes.length,
      successCount,
      errorCount,
    };
    status.timestamp = new Date().toISOString();
    this.saveIngestionStatus(status);
    console.log(` Completed: ${successCount} success, ${errorCount} errors`);
    return true;
  }

  /**
   * Flattens a LOCOMO conversation into one episode per dialog turn.
   * Sessions are found via `session_N` keys (their timestamps under
   * `session_N_date_time`); each turn's content is "<speaker>: [Shared
   * <caption>.] <text>".
   */
  formatConversationForIngestion(conversation, conversationId) {
    const episodes = [];
    const conv = conversation.conversation;
    // Extract speaker names
    const speakerA = conv.speaker_a;
    const speakerB = conv.speaker_b;
    // Process each session
    Object.keys(conv).forEach((key) => {
      if (key.startsWith("session_") && !key.endsWith("_date_time")) {
        const sessionNumber = key.replace("session_", "");
        const sessionData = conv[key];
        const sessionDateTime = conv[`session_${sessionNumber}_date_time`];
        if (Array.isArray(sessionData)) {
          sessionData.forEach((dialog, dialogIndex) => {
            episodes.push({
              content: `${dialog.speaker}: ${dialog.blip_caption ? `Shared ${dialog.blip_caption}.` : ""} ${dialog.text}`,
              metadata: {
                conversationId,
                sessionNumber: parseInt(sessionNumber),
                dialogIndex,
                dialogId: dialog.dia_id,
                // Rewrites "H:MM am/pm on D Month, YYYY" into a
                // "YYYY-MM-DD H:MM am/pm" form before Date.parse, then
                // converts to ISO; null when the session has no date/time.
                // NOTE(review): that intermediate format is not a
                // standard Date.parse format — confirm it parses on the
                // target Node version.
                timestamp: sessionDateTime
                  ? new Date(
                      Date.parse(
                        sessionDateTime.replace(
                          /(\d+):(\d+) (am|pm) on (\d+) (\w+), (\d+)/,
                          (_, hours, minutes, ampm, day, month, year) => {
                            // Map full and abbreviated month names to numbers.
                            const monthMap = {
                              January: 1,
                              Jan: 1,
                              February: 2,
                              Feb: 2,
                              March: 3,
                              Mar: 3,
                              April: 4,
                              Apr: 4,
                              May: 5,
                              June: 6,
                              Jun: 6,
                              July: 7,
                              Jul: 7,
                              August: 8,
                              Aug: 8,
                              September: 9,
                              Sep: 9,
                              October: 10,
                              Oct: 10,
                              November: 11,
                              Nov: 11,
                              December: 12,
                              Dec: 12,
                            };
                            // Unknown month names silently fall back to January.
                            const monthNum = monthMap[month] || 1;
                            return `${year}-${monthNum.toString().padStart(2, "0")}-${day.padStart(2, "0")} ${hours}:${minutes} ${ampm}`;
                          }
                        )
                      )
                    ).toISOString()
                  : null,
                speaker: dialog.speaker,
                speakerA,
                speakerB,
                source: "locomo_benchmark",
              },
            });
          });
        }
      }
    });
    return episodes;
  }

  /**
   * Loads locomo10.json (next to this script) and ingests conversations,
   * then prints a summary.
   * NOTE(review): the `i === 0` guard restricts ingestion to the FIRST
   * conversation only — looks like a deliberate test limiter; confirm
   * before relying on this for full-dataset ingestion.
   */
  async ingestAll(forceReingest = false) {
    console.log("Starting LOCOMO conversation ingestion...");
    if (forceReingest) {
      console.log("Force re-ingestion enabled - will overwrite existing data");
    }
    // Load LOCOMO dataset
    const dataPath = path.join(__dirname, "locomo10.json");
    const conversations = JSON.parse(fs.readFileSync(dataPath, "utf8"));
    console.log(`Loaded ${conversations.length} conversations`);
    let ingestedCount = 0;
    let skippedCount = 0;
    // Ingest each conversation
    for (let i = 0; i < conversations.length; i++) {
      if (i === 0) {
        const conversation = conversations[i];
        const conversationId = `locomo_${i + 1}`;
        try {
          const wasIngested = await this.ingestConversation(
            conversation,
            conversationId,
            forceReingest
          );
          if (wasIngested) {
            ingestedCount++;
          } else {
            skippedCount++;
          }
        } catch (error) {
          console.error(`Error with conversation ${conversationId}:`, error.message);
        }
      }
    }
    this.printSummary(ingestedCount, skippedCount);
  }

  // Prints run totals plus cumulative episode counts from the status file.
  printSummary(ingestedCount, skippedCount) {
    console.log("\n=== INGESTION SUMMARY ===");
    console.log(`Conversations ingested: ${ingestedCount}`);
    console.log(`Conversations skipped: ${skippedCount}`);
    console.log(`Status file: ${this.statusFile}`);
    const status = this.loadIngestionStatus();
    const totalEpisodes = Object.values(status.conversations).reduce(
      (sum, conv) => sum + (conv.totalEpisodes || 0),
      0
    );
    const totalSuccess = Object.values(status.conversations).reduce(
      (sum, conv) => sum + (conv.successCount || 0),
      0
    );
    console.log(`Total episodes ingested: ${totalSuccess}/${totalEpisodes}`);
    console.log("\nReady for evaluation phase!");
  }

  // Returns a small summary of the persisted status: conversation count,
  // successfully ingested episode count, and last-run timestamp.
  getStatus() {
    const status = this.loadIngestionStatus();
    const conversations = Object.keys(status.conversations).length;
    const totalEpisodes = Object.values(status.conversations).reduce(
      (sum, conv) => sum + (conv.successCount || 0),
      0
    );
    return {
      conversations,
      episodes: totalEpisodes,
      lastIngestion: status.timestamp,
    };
  }
}
// Command line interface
// Command line interface: `--status` prints the persisted ingestion
// summary; otherwise runs a full ingestion (`--force` re-ingests).
if (require.main === module) {
  const cliArgs = process.argv.slice(2);
  const wantsStatus = cliArgs.includes("--status");
  const wantsForce = cliArgs.includes("--force");
  const ingester = new LocomoIngester();

  if (wantsStatus) {
    const current = ingester.getStatus();
    console.log("LOCOMO Ingestion Status:");
    console.log(` Conversations: ${current.conversations}`);
    console.log(` Episodes: ${current.episodes}`);
    console.log(` Last ingestion: ${current.lastIngestion || "Never"}`);
  } else {
    ingester.ingestAll(wantsForce).catch(console.error);
  }
}

module.exports = LocomoIngester;

View File

@ -1,265 +0,0 @@
#!/usr/bin/env node
const fs = require("fs");
const path = require("path");
const axios = require("axios");
/**
* LOCOMO Session Summary Ingestion Script
* Ingests LOCOMO session summaries - comprehensive and available for all conversations
* More efficient than full conversations while preserving all key information
*/
/**
 * Ingests LOCOMO per-session summaries (rather than full dialog turns)
 * into the C.O.R.E memory system via /api/v1/add. Cheaper than full
 * conversation ingestion; progress is persisted in
 * session_ingestion_status.json next to this script.
 */
class LocomoSessionIngester {
  /**
   * @param {string} [baseUrl] - API root; defaults to the BASE_URL env var.
   *   Requests authenticate with a bearer token taken from API_KEY.
   */
  constructor(baseUrl = process.env.BASE_URL) {
    this.baseUrl = baseUrl;
    this.headers = {
      Authorization: `Bearer ${process.env.API_KEY}`,
    };
    // Status file lives alongside this script.
    this.statusFile = path.join(__dirname, "session_ingestion_status.json");
    // Create axios instance with default config
    this.axios = axios.create({
      baseURL: this.baseUrl,
      headers: this.headers,
      timeout: 10000, // 10 second timeout
    });
  }

  /**
   * POSTs `data` to `endpoint` and returns the response body.
   * Normalizes axios failures (HTTP error status / no response / setup
   * error) into plain Errors.
   */
  async makeRequest(endpoint, data) {
    try {
      const response = await this.axios.post(endpoint, data, {
        headers: {
          "Content-Type": "application/json",
        },
      });
      return response.data;
    } catch (error) {
      if (error.response) {
        // Server responded with a non-2xx status.
        throw new Error(`HTTP ${error.response.status}: ${JSON.stringify(error.response.data)}`);
      } else if (error.request) {
        // Request was sent but no response arrived.
        throw new Error(`No response received: ${error.message}`);
      } else {
        // Request could not be set up at all.
        throw new Error(`Request error: ${error.message}`);
      }
    }
  }

  // Reads the persisted status JSON; falls back to an empty status object
  // when the file is missing or unreadable.
  loadIngestionStatus() {
    try {
      if (fs.existsSync(this.statusFile)) {
        return JSON.parse(fs.readFileSync(this.statusFile, "utf8"));
      }
    } catch (error) {
      console.warn("Could not load ingestion status:", error.message);
    }
    return { conversations: {}, timestamp: null };
  }

  // Persists the status object (pretty-printed) to the status file.
  saveIngestionStatus(status) {
    fs.writeFileSync(this.statusFile, JSON.stringify(status, null, 2));
  }

  /**
   * Converts a conversation's `session_summary` map into one episode per
   * session ("Session N Summary: ..."), tagged with both speakers.
   */
  formatSessionSummaryForIngestion(conversation, conversationId) {
    const episodes = [];
    const sessionSummary = conversation.session_summary;
    const conv = conversation.conversation;
    const speakerA = conv.speaker_a;
    const speakerB = conv.speaker_b;
    // Process each session summary
    Object.entries(sessionSummary).forEach(([sessionKey, summary]) => {
      // Keys look like "session_N_summary" (or "session_N"); strip both parts.
      const sessionNumber = sessionKey.replace("session_", "").replace("_summary", "");
      episodes.push({
        content: `Session ${sessionNumber} Summary: ${summary}`,
        metadata: {
          conversationId,
          sessionNumber: parseInt(sessionNumber),
          speakerA,
          speakerB,
          source: "locomo_sessions",
          type: "session_summary",
        },
      });
    });
    return episodes;
  }

  /**
   * Ingests one conversation's session summaries, skipping conversations
   * already marked ingested unless forceReingest is true.
   * @returns {Promise<boolean>} true when summaries were sent, false when skipped.
   */
  async ingestConversation(conversation, conversationId, forceReingest = false) {
    const status = this.loadIngestionStatus();
    if (status.conversations[conversationId] && !forceReingest) {
      console.log(`Conversation ${conversationId} already ingested, skipping...`);
      return false;
    }
    console.log(`Ingesting session summaries for conversation ${conversationId}...`);
    const episodes = this.formatSessionSummaryForIngestion(conversation, conversationId);
    let successCount = 0;
    let errorCount = 0;
    console.log(` Total sessions to ingest: ${episodes.length}`);
    for (const [index, episode] of episodes.entries()) {
      try {
        const payload = {
          episodeBody: episode.content,
          // Synthetic reference times 1s apart preserve session ordering.
          referenceTime: new Date(Date.now() + index * 1000).toISOString(),
          source: "locomo_sessions",
        };
        await this.makeRequest("/api/v1/add", payload);
        successCount++;
        // Progress indicator
        if ((index + 1) % 10 === 0) {
          console.log(` Ingested ${index + 1}/${episodes.length} sessions`);
        }
        // Small delay
        await new Promise((resolve) => setTimeout(resolve, 100));
      } catch (error) {
        // Per-session failures are counted but do not abort the run.
        console.error(` Error ingesting session ${index}:`, error.message);
        errorCount++;
      }
    }
    // Update status
    status.conversations[conversationId] = {
      ingested: true,
      timestamp: new Date().toISOString(),
      totalEpisodes: episodes.length,
      successCount,
      errorCount,
    };
    status.timestamp = new Date().toISOString();
    this.saveIngestionStatus(status);
    console.log(` Completed: ${successCount} success, ${errorCount} errors`);
    return true;
  }

  /**
   * Loads data/locomo10.json, verifies connectivity with a test episode,
   * then ingests session summaries and prints a summary.
   * NOTE(review): the `i === 0` guard restricts ingestion to the FIRST
   * conversation only — looks like a deliberate test limiter; confirm
   * before relying on this for full-dataset ingestion.
   */
  async ingestAll(forceReingest = false) {
    console.log("Starting LOCOMO session summary ingestion...");
    if (forceReingest) {
      console.log("Force re-ingestion enabled");
    }
    // Load LOCOMO dataset
    const dataPath = path.join(__dirname, "data", "locomo10.json");
    const conversations = JSON.parse(fs.readFileSync(dataPath, "utf8"));
    console.log(`Loaded ${conversations.length} conversations`);
    let ingestedCount = 0;
    let skippedCount = 0;
    // Test connection first — note this writes a real "test" episode.
    try {
      console.log("Testing connection...");
      await this.makeRequest("/api/v1/add", {
        episodeBody: "Session ingestion test",
        referenceTime: new Date().toISOString(),
        source: "test",
      });
      console.log("Connection test successful");
    } catch (error) {
      console.error("Connection test failed:", error.message);
      return;
    }
    // Ingest all conversations
    for (let i = 0; i < conversations.length; i++) {
      const conversation = conversations[i];
      const conversationId = `locomo_sessions_${i + 1}`;
      if (i === 0) {
        try {
          const wasIngested = await this.ingestConversation(
            conversation,
            conversationId,
            forceReingest
          );
          if (wasIngested) {
            ingestedCount++;
          } else {
            skippedCount++;
          }
        } catch (error) {
          console.error(`Error with conversation ${conversationId}:`, error.message);
        }
      }
    }
    this.printSummary(ingestedCount, skippedCount);
  }

  // Prints run totals plus cumulative session counts and success rate
  // derived from the status file.
  printSummary(ingestedCount, skippedCount) {
    console.log("\n=== SESSION SUMMARY INGESTION ===");
    console.log(`Conversations processed: ${ingestedCount}`);
    console.log(`Conversations skipped: ${skippedCount}`);
    const status = this.loadIngestionStatus();
    const totalSessions = Object.values(status.conversations).reduce(
      (sum, conv) => sum + (conv.totalEpisodes || 0),
      0
    );
    const totalSuccess = Object.values(status.conversations).reduce(
      (sum, conv) => sum + (conv.successCount || 0),
      0
    );
    const totalErrors = Object.values(status.conversations).reduce(
      (sum, conv) => sum + (conv.errorCount || 0),
      0
    );
    console.log(`Total sessions ingested: ${totalSuccess}/${totalSessions}`);
    // The `|| 1` guard avoids division by zero when nothing was attempted.
    console.log(
      `Success rate: ${((totalSuccess / (totalSuccess + totalErrors || 1)) * 100).toFixed(1)}%`
    );
    console.log("\nReady for evaluation phase!");
    console.log("Benefits: Fast ingestion, comprehensive summaries, all conversations covered");
  }

  // Returns a small summary of the persisted status: conversation count,
  // successfully ingested session count, and last-run timestamp.
  getStatus() {
    const status = this.loadIngestionStatus();
    const conversations = Object.keys(status.conversations).length;
    const totalSessions = Object.values(status.conversations).reduce(
      (sum, conv) => sum + (conv.successCount || 0),
      0
    );
    return {
      conversations,
      sessions: totalSessions,
      lastIngestion: status.timestamp,
    };
  }
}
// Command line interface
// Command line interface: `--status` prints the persisted session-ingestion
// summary; otherwise runs an ingestion pass (`--force` re-ingests).
if (require.main === module) {
  const cliArgs = process.argv.slice(2);
  const wantsStatus = cliArgs.includes("--status");
  const wantsForce = cliArgs.includes("--force");
  const ingester = new LocomoSessionIngester();

  if (wantsStatus) {
    const current = ingester.getStatus();
    console.log("LOCOMO Session Ingestion Status:");
    console.log(` Conversations: ${current.conversations}`);
    console.log(` Sessions: ${current.sessions}`);
    console.log(` Last ingestion: ${current.lastIngestion || "Never"}`);
  } else {
    ingester.ingestAll(wantsForce).catch(console.error);
  }
}

module.exports = LocomoSessionIngester;

View File

@ -1,12 +0,0 @@
{
"conversations": {
"locomo_1": {
"ingested": true,
"timestamp": "2025-08-12T05:31:39.437Z",
"totalEpisodes": 419,
"successCount": 419,
"errorCount": 0
}
},
"timestamp": "2025-08-12T05:31:39.441Z"
}

File diff suppressed because one or more lines are too long

View File

@ -1,295 +0,0 @@
{
"name": "locomo",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "locomo",
"version": "1.0.0",
"license": "ISC",
"dependencies": {
"axios": "^1.11.0"
}
},
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
"license": "MIT"
},
"node_modules/axios": {
"version": "1.11.0",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.11.0.tgz",
"integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.4",
"proxy-from-env": "^1.1.0"
}
},
"node_modules/call-bind-apply-helpers": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
"integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"function-bind": "^1.1.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"license": "MIT",
"dependencies": {
"delayed-stream": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
"license": "MIT",
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/dunder-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
"integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.1",
"es-errors": "^1.3.0",
"gopd": "^1.2.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-define-property": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
"integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-errors": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-object-atoms": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
"integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-set-tostringtag": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.6",
"has-tostringtag": "^1.0.2",
"hasown": "^2.0.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/follow-redirects": {
"version": "1.15.11",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz",
"integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/RubenVerborgh"
}
],
"license": "MIT",
"engines": {
"node": ">=4.0"
},
"peerDependenciesMeta": {
"debug": {
"optional": true
}
}
},
"node_modules/form-data": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/function-bind": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
"integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/get-intrinsic": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
"integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.2",
"es-define-property": "^1.0.1",
"es-errors": "^1.3.0",
"es-object-atoms": "^1.1.1",
"function-bind": "^1.1.2",
"get-proto": "^1.0.1",
"gopd": "^1.2.0",
"has-symbols": "^1.1.0",
"hasown": "^2.0.2",
"math-intrinsics": "^1.1.0"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/get-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
"integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
"license": "MIT",
"dependencies": {
"dunder-proto": "^1.0.1",
"es-object-atoms": "^1.0.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/gopd": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
"integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/has-symbols": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
"integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/has-tostringtag": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
"license": "MIT",
"dependencies": {
"has-symbols": "^1.0.3"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/hasown": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
"integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
"license": "MIT",
"dependencies": {
"function-bind": "^1.1.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/math-intrinsics": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
"integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"license": "MIT",
"dependencies": {
"mime-db": "1.52.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/proxy-from-env": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
"license": "MIT"
}
}
}

View File

@ -1,23 +0,0 @@
{
"name": "locomo",
"version": "1.0.0",
"description": "**Authors**: [Adyasha Maharana](https://adymaharana.github.io/), [Dong-Ho Lee](https://www.danny-lee.info/), [Sergey Tulyakov](https://stulyakov.com/), [Mohit Bansal](https://www.cs.unc.edu/~mbansal/), [Francesco Barbieri](https://fvancesco.github.io/) and [Yuwei Fang](https://yuwfan.github.io/)",
"main": "debug_auth.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "git+https://github.com/snap-research/locomo.git"
},
"keywords": [],
"author": "",
"license": "ISC",
"bugs": {
"url": "https://github.com/snap-research/locomo/issues"
},
"homepage": "https://github.com/snap-research/locomo#readme",
"dependencies": {
"axios": "^1.11.0"
}
}

View File

@ -1,4 +1,4 @@
VERSION=0.1.16
VERSION=0.1.17
# Nest run in docker, change host to database container name
DB_HOST=postgres
@ -97,7 +97,7 @@ TRIGGER_IMAGE_TAG=v4-beta
# - In production, these should be set to the public URL of your webapp, e.g. https://trigger.example.com
APP_ORIGIN=http://localhost:8030
LOGIN_ORIGIN=http://localhost:8030
API_ORIGIN=http://localhost:8030
API_ORIGIN=http://trigger-webapp:3000
DEV_OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:8030/otel
# You may need to set this when testing locally or when using the combined setup
# API_ORIGIN=http://webapp:3000

View File

@ -1,107 +0,0 @@
version: "3.8"
services:
core:
container_name: core-app
image: redplanethq/core:${VERSION}
environment:
- NODE_ENV=${NODE_ENV}
- DATABASE_URL=${DATABASE_URL}
- DIRECT_URL=${DIRECT_URL}
- SESSION_SECRET=${SESSION_SECRET}
- ENCRYPTION_KEY=${ENCRYPTION_KEY}
- MAGIC_LINK_SECRET=${MAGIC_LINK_SECRET}
- LOGIN_ORIGIN=${LOGIN_ORIGIN}
- APP_ORIGIN=${APP_ORIGIN}
- REDIS_HOST=${REDIS_HOST}
- REDIS_PORT=${REDIS_PORT}
- REDIS_TLS_DISABLED=${REDIS_TLS_DISABLED}
- NEO4J_URI=${NEO4J_URI}
- NEO4J_USERNAME=${NEO4J_USERNAME}
- NEO4J_PASSWORD=${NEO4J_PASSWORD}
- OPENAI_API_KEY=${OPENAI_API_KEY}
- AUTH_GOOGLE_CLIENT_ID=${AUTH_GOOGLE_CLIENT_ID}
- AUTH_GOOGLE_CLIENT_SECRET=${AUTH_GOOGLE_CLIENT_SECRET}
- ENABLE_EMAIL_LOGIN=${ENABLE_EMAIL_LOGIN}
- OLLAMA_URL=${OLLAMA_URL}
- EMBEDDING_MODEL=${EMBEDDING_MODEL}
- MODEL=${MODEL}
- TRIGGER_PROJECT_ID=${TRIGGER_PROJECT_ID}
- TRIGGER_SECRET_KEY=${TRIGGER_SECRET_KEY}
- TRIGGER_API_URL=${TRIGGER_API_URL}
- POSTGRES_DB=${POSTGRES_DB}
ports:
- "3033:3000"
depends_on:
postgres:
condition: service_healthy
redis:
condition: service_started
neo4j:
condition: service_healthy
networks:
- core
- trigger-webapp
postgres:
container_name: core-postgres
image: tegonhq/tegon-postgres:0.1.0-alpha
environment:
- POSTGRES_USER=${POSTGRES_USER}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
- POSTGRES_DB=${POSTGRES_DB}
ports:
- "5432:5432"
volumes:
- postgres_data:/var/lib/postgresql/data
networks:
- core
healthcheck:
test: ["CMD-SHELL", "pg_isready"]
interval: 10s
timeout: 5s
retries: 5
start_period: 10s
redis:
container_name: core-redis
image: redis:7
ports:
- "6379:6379"
networks:
- core
neo4j:
container_name: core-neo4j
image: neo4j:5
environment:
- NEO4J_AUTH=${NEO4J_AUTH}
- NEO4J_dbms_security_procedures_unrestricted=gds.*,apoc.*
- NEO4J_dbms_security_procedures_allowlist=gds.*,apoc.*
- NEO4J_apoc_export_file_enabled=true # Enable file export
- NEO4J_apoc_import_file_enabled=true # Enable file import
- NEO4J_apoc_import_file_use_neo4j_config=true
- NEO4J_dbms_memory_heap_initial__size=1G
- NEO4J_dbms_memory_heap_max__size=2G
ports:
- "7474:7474"
- "7687:7687"
volumes:
- neo4j_data:/data
networks:
- core
healthcheck:
test: ["CMD-SHELL", "cypher-shell -u $NEO4J_USERNAME -p $NEO4J_PASSWORD 'RETURN 1'"]
interval: 10s
timeout: 5s
retries: 10
start_period: 20s
networks:
core:
name: core-network
driver: bridge
volumes:
postgres_data:
neo4j_data:

View File

@ -1,7 +1,7 @@
{
"name": "core",
"private": true,
"version": "0.1.16",
"version": "0.1.17",
"workspaces": [
"apps/*",
"packages/*"

View File

@ -0,0 +1,21 @@
-- Migration: introduce the MCP session tables.
-- MCPSession records one MCP client session and the integration slugs it
-- was opened with; MCPSessionLog records per-session log entries with a
-- soft-delete timestamp ("deleted").

-- CreateTable
CREATE TABLE "MCPSession" (
    "id" TEXT NOT NULL,
    "source" TEXT NOT NULL,
    "integrations" TEXT[],

    CONSTRAINT "MCPSession_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "MCPSessionLog" (
    "id" TEXT NOT NULL,
    "mcpSessionId" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "deleted" TIMESTAMP(3),

    CONSTRAINT "MCPSessionLog_pkey" PRIMARY KEY ("id")
);

-- AddForeignKey
-- Every log row must point at an existing session; sessions with logs
-- cannot be deleted (ON DELETE RESTRICT).
ALTER TABLE "MCPSessionLog" ADD CONSTRAINT "MCPSessionLog_mcpSessionId_fkey" FOREIGN KEY ("mcpSessionId") REFERENCES "MCPSession"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

View File

@ -0,0 +1,15 @@
-- Migration: collapse MCPSessionLog into MCPSession. The audit columns
-- (createdAt, soft-delete "deleted") move onto MCPSession and the log
-- table is dropped. Destructive: existing MCPSessionLog rows are lost.

/*
  Warnings:

  - You are about to drop the `MCPSessionLog` table. If the table is not empty, all the data it contains will be lost.

*/
-- DropForeignKey
-- Must drop the FK before the referenced table can be removed.
ALTER TABLE "MCPSessionLog" DROP CONSTRAINT "MCPSessionLog_mcpSessionId_fkey";

-- AlterTable
ALTER TABLE "MCPSession" ADD COLUMN "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
ADD COLUMN "deleted" TIMESTAMP(3);

-- DropTable
DROP TABLE "MCPSessionLog";

View File

@ -210,6 +210,15 @@ model InvitationCode {
createdAt DateTime @default(now())
}
// One MCP client session. `source` identifies the originating client and
// `integrations` lists the integration slugs the session was opened with.
// `deleted` is a soft-delete timestamp (null while the session is live).
model MCPSession {
  id           String    @id @default(cuid())
  source       String
  integrations String[]
  createdAt    DateTime  @default(now())
  deleted      DateTime?
}
model OAuthAuthorizationCode {
id String @id @default(cuid())