fix: session_id for memory ingestion

This commit is contained in:
Harshith Mullapudi 2025-10-30 00:59:52 +05:30
parent a57996a510
commit 9fe54a3bb8
9 changed files with 72 additions and 67 deletions

View File

@@ -32,7 +32,7 @@ async function runBertWithExec(
minTopicSize: number,
nrTopics?: number,
): Promise<string> {
let command = `python3 /core/apps/webapp/app/bert/main.py ${userId} --json`;
let command = `python3 /core/apps/webapp/python/main.py ${userId} --json`;
if (minTopicSize) {
command += ` --min-topic-size ${minTopicSize}`;

View File

@@ -36,7 +36,7 @@ export interface SessionCompactionResult {
}
// Zod schema for LLM response validation
const CompactionResultSchema = z.object({
export const CompactionResultSchema = z.object({
summary: z.string().describe("Consolidated narrative of the entire session"),
confidence: z
.number()
@@ -45,7 +45,7 @@ const CompactionResultSchema = z.object({
.describe("Confidence score of the compaction quality"),
});
const CONFIG = {
export const CONFIG = {
minEpisodesForCompaction: 5, // Minimum episodes to trigger compaction
compactionThreshold: 1, // Trigger after N new episodes
maxEpisodesPerBatch: 50, // Process in batches if needed

View File

@@ -16,7 +16,6 @@ import {
updateSpace,
} from "./graphModels/space";
import { prisma } from "~/trigger/utils/prisma";
import { trackFeatureUsage } from "./telemetry.server";
export class SpaceService {
/**
@@ -64,7 +63,7 @@ export class SpaceService {
logger.info(`Created space ${space.id} successfully`);
// Track space creation
trackFeatureUsage("space_created", params.userId).catch(console.error);
// trackFeatureUsage("space_created", params.userId).catch(console.error);
return space;
}
@@ -177,9 +176,6 @@ export class SpaceService {
logger.info(`Nothing to update to graph`);
}
// Track space update
trackFeatureUsage("space_updated", userId).catch(console.error);
logger.info(`Updated space ${spaceId} successfully`);
return space;
}

View File

@@ -24,7 +24,7 @@ async function runBertWithTriggerPython(
`[BERT Topic Analysis] Running with Trigger.dev Python: args=${args.join(" ")}`,
);
const result = await python.runScript("./apps/webapp/app/bert/main.py", args);
const result = await python.runScript("./python/main.py", args);
return result.stdout;
}

View File

@@ -76,6 +76,11 @@ const IngestSchema = {
description:
"The conversation text to store. Include both what the user asked and what you answered. Keep it concise but complete.",
},
sessionId: {
type: "string",
description:
"IMPORTANT: Session ID (UUID) is required to track the conversation session. If you don't have a sessionId in your context, you MUST call the get_session_id tool first to obtain one before calling memory_ingest.",
},
spaceIds: {
type: "array",
items: {
@@ -85,14 +90,14 @@ const IngestSchema = {
"Optional: Array of space UUIDs (from memory_get_spaces). Add this to organize the memory by project. Example: If discussing 'core' project, include the 'core' space ID. Leave empty to store in general memory.",
},
},
required: ["message"],
required: ["message", "sessionId"],
};
export const memoryTools = [
{
name: "memory_ingest",
description:
"Store conversation in memory for future reference. USE THIS TOOL: At the END of every conversation after fully answering the user. WHAT TO STORE: 1) User's question or request, 2) Your solution or explanation, 3) Important decisions made, 4) Key insights discovered. HOW TO USE: Put the entire conversation summary in the 'message' field. Optionally add spaceIds array to organize by project. Returns: Success confirmation with storage ID.",
"Store conversation in memory for future reference. USE THIS TOOL: At the END of every conversation after fully answering the user. WHAT TO STORE: 1) User's question or request, 2) Your solution or explanation, 3) Important decisions made, 4) Key insights discovered. HOW TO USE: Put the entire conversation summary in the 'message' field. IMPORTANT: You MUST provide a sessionId - if you don't have one in your context, call get_session_id tool first to obtain it. Optionally add spaceIds array to organize by project. Returns: Success confirmation with storage ID.",
inputSchema: IngestSchema,
},
{
@@ -177,7 +182,7 @@ export const memoryTools = [
{
name: "get_integration_actions",
description:
"Get list of actions available for a specific integration. USE THIS TOOL: After get_integrations to see what operations you can perform. For example, GitHub integration has actions like 'get_pr', 'get_issues', 'create_issue'. HOW TO USE: Provide the integrationSlug from get_integrations (like 'github', 'linear', 'slack').",
"Get list of actions available for a specific integration. USE THIS TOOL: After get_integrations to see what operations you can perform. For example, GitHub integration has actions like 'get_pr', 'get_issues', 'create_issue'. HOW TO USE: Provide the integrationSlug from get_integrations (like 'github', 'linear', 'slack'). Returns: Array of actions with name, description, and inputSchema for each.",
inputSchema: {
type: "object",
properties: {
@@ -193,7 +198,7 @@ export const memoryTools = [
{
name: "execute_integration_action",
description:
"Execute an action on an integration (fetch GitHub PR, create Linear issue, send Slack message, etc.). USE THIS TOOL: After using get_integration_actions to see available actions. HOW TO USE: 1) Set integrationSlug (like 'github'), 2) Set action name (like 'get_pr'), 3) Set arguments object with required parameters from the action's inputSchema.",
"Execute an action on an integration (fetch GitHub PR, create Linear issue, send Slack message, etc.). USE THIS TOOL: After using get_integration_actions to see available actions. HOW TO USE: 1) Set integrationSlug (like 'github'), 2) Set action name (like 'get_pr'), 3) Set arguments object with required parameters from the action's inputSchema. Returns: Result of the action execution.",
inputSchema: {
type: "object",
properties: {
@@ -351,6 +356,7 @@ async function handleMemoryIngest(args: any) {
source: args.source,
type: EpisodeTypeEnum.CONVERSATION,
spaceIds,
sessionId: args.sessionId,
},
args.userId,
);

View File

@@ -4,7 +4,7 @@ import { prismaExtension } from "@trigger.dev/build/extensions/prisma";
import { pythonExtension } from "@trigger.dev/python/extension";
export default defineConfig({
project: "proj_dtctdgjvszcisssppudu",
project: process.env.TRIGGER_PROJECT_ID as string,
runtime: "node",
logLevel: "log",
// The max compute seconds a task is allowed to run. If the task run exceeds this duration, it will be stopped.

View File

@@ -79,11 +79,11 @@ COPY --from=builder --chown=node:node /core/apps/webapp/public ./apps/webapp/pub
COPY --from=builder --chown=node:node /core/scripts ./scripts
# Install BERT Python dependencies
COPY --chown=node:node apps/webapp/app/bert/requirements.txt ./apps/webapp/app/bert/requirements.txt
RUN pip3 install --no-cache-dir -r ./apps/webapp/app/bert/requirements.txt
COPY --chown=node:node apps/webapp/python/requirements.txt ./apps/webapp/python/requirements.txt
RUN pip3 install --no-cache-dir -r ./apps/webapp/python/requirements.txt
# Copy BERT scripts
COPY --chown=node:node apps/webapp/app/bert/main.py ./apps/webapp/app/bert/main.py
COPY --chown=node:node apps/webapp/python/main.py ./apps/webapp/python/main.py
EXPOSE 3000

View File

@@ -52,4 +52,6 @@ MODEL=gpt-4.1-2025-04-14
## for opensource embedding model
# EMBEDDING_MODEL=mxbai-embed-large
QUEUE_PROVIDER=bullmq
QUEUE_PROVIDER=bullmq
TELEMETRY_ENABLED=false

View File

@@ -8,55 +8,56 @@ x-logging: &logging-config
version: "3.8"
services:
# core:
# container_name: core-app
# image: redplanethq/core:${VERSION}
# environment:
# - NODE_ENV=${NODE_ENV}
# - DATABASE_URL=${DATABASE_URL}
# - DIRECT_URL=${DIRECT_URL}
# - SESSION_SECRET=${SESSION_SECRET}
# - ENCRYPTION_KEY=${ENCRYPTION_KEY}
# - MAGIC_LINK_SECRET=${MAGIC_LINK_SECRET}
# - LOGIN_ORIGIN=${CORE_LOGIN_ORIGIN}
# - APP_ORIGIN=${CORE_APP_ORIGIN}
# - REDIS_HOST=${REDIS_HOST}
# - REDIS_PORT=${REDIS_PORT}
# - REDIS_PASSWORD=${REDIS_PASSWORD}
# - REDIS_TLS_DISABLED=${REDIS_TLS_DISABLED}
# - NEO4J_URI=${NEO4J_URI}
# - NEO4J_USERNAME=${NEO4J_USERNAME}
# - NEO4J_PASSWORD=${NEO4J_PASSWORD}
# - OPENAI_API_KEY=${OPENAI_API_KEY}
# - AUTH_GOOGLE_CLIENT_ID=${AUTH_GOOGLE_CLIENT_ID}
# - AUTH_GOOGLE_CLIENT_SECRET=${AUTH_GOOGLE_CLIENT_SECRET}
# - ENABLE_EMAIL_LOGIN=${ENABLE_EMAIL_LOGIN}
# - OLLAMA_URL=${OLLAMA_URL}
# - EMBEDDING_MODEL=${EMBEDDING_MODEL}
# - MODEL=${MODEL}
# - TRIGGER_PROJECT_ID=${TRIGGER_PROJECT_ID}
# - TRIGGER_SECRET_KEY=${TRIGGER_SECRET_KEY}
# - TRIGGER_API_URL=${API_ORIGIN}
# - POSTGRES_DB=${POSTGRES_DB}
# - EMAIL_TRANSPORT=${EMAIL_TRANSPORT}
# - REPLY_TO_EMAIL=${REPLY_TO_EMAIL}
# - FROM_EMAIL=${FROM_EMAIL}
# - RESEND_API_KEY=${RESEND_API_KEY}
# - COHERE_API_KEY=${COHERE_API_KEY}
# - QUEUE_PROVIDER=${QUEUE_PROVIDER}
# - TELEMETRY_ENABLED=${TELEMETRY_ENABLED}
# - TELEMETRY_ANONYMOUS=${TELEMETRY_ANONYMOUS}
# ports:
# - "3033:3000"
# depends_on:
# postgres:
# condition: service_healthy
# redis:
# condition: service_started
# neo4j:
# condition: service_healthy
# networks:
# - core
core:
container_name: core-app
image: redplanethq/core:${VERSION}
environment:
- NODE_ENV=${NODE_ENV}
- DATABASE_URL=${DATABASE_URL}
- DIRECT_URL=${DIRECT_URL}
- SESSION_SECRET=${SESSION_SECRET}
- ENCRYPTION_KEY=${ENCRYPTION_KEY}
- MAGIC_LINK_SECRET=${MAGIC_LINK_SECRET}
- LOGIN_ORIGIN=${CORE_LOGIN_ORIGIN}
- APP_ORIGIN=${CORE_APP_ORIGIN}
- REDIS_HOST=${REDIS_HOST}
- REDIS_PORT=${REDIS_PORT}
- REDIS_PASSWORD=${REDIS_PASSWORD}
- REDIS_TLS_DISABLED=${REDIS_TLS_DISABLED}
- NEO4J_URI=${NEO4J_URI}
- NEO4J_USERNAME=${NEO4J_USERNAME}
- NEO4J_PASSWORD=${NEO4J_PASSWORD}
- OPENAI_API_KEY=${OPENAI_API_KEY}
- AUTH_GOOGLE_CLIENT_ID=${AUTH_GOOGLE_CLIENT_ID}
- AUTH_GOOGLE_CLIENT_SECRET=${AUTH_GOOGLE_CLIENT_SECRET}
- ENABLE_EMAIL_LOGIN=${ENABLE_EMAIL_LOGIN}
- OLLAMA_URL=${OLLAMA_URL}
- EMBEDDING_MODEL=${EMBEDDING_MODEL}
- EMBEDDING_MODEL_SIZE=${EMBEDDING_MODEL_SIZE}
- MODEL=${MODEL}
- TRIGGER_PROJECT_ID=${TRIGGER_PROJECT_ID}
- TRIGGER_SECRET_KEY=${TRIGGER_SECRET_KEY}
- TRIGGER_API_URL=${API_ORIGIN}
- POSTGRES_DB=${POSTGRES_DB}
- EMAIL_TRANSPORT=${EMAIL_TRANSPORT}
- REPLY_TO_EMAIL=${REPLY_TO_EMAIL}
- FROM_EMAIL=${FROM_EMAIL}
- RESEND_API_KEY=${RESEND_API_KEY}
- COHERE_API_KEY=${COHERE_API_KEY}
- QUEUE_PROVIDER=${QUEUE_PROVIDER}
- TELEMETRY_ENABLED=${TELEMETRY_ENABLED}
- TELEMETRY_ANONYMOUS=${TELEMETRY_ANONYMOUS}
ports:
- "3033:3000"
depends_on:
postgres:
condition: service_healthy
redis:
condition: service_started
neo4j:
condition: service_healthy
networks:
- core
postgres:
container_name: core-postgres