Mirror of https://github.com/eliasstepanik/core.git (synced 2026-01-12 03:18:28 +00:00)

Compare commits (30 commits)
| SHA1 |
|---|
| f038ad5c61 |
| 4f27d2128b |
| c869096be8 |
| c5407be54d |
| 6c37b41ca4 |
| 023a220d3e |
| b9c4fc13c2 |
| 0ad2bba2ad |
| faad985e48 |
| 8de059bb2e |
| 76228d6aac |
| 6ac74a3f0b |
| b255bbe7e6 |
| da3d06782e |
| a727671a30 |
| e7ed6eb288 |
| 5b31c8ed62 |
| f39c7cc6d0 |
| b78713df41 |
| 6f1037e8e1 |
| af56d7016e |
| 3a10ee53e8 |
| ef1c8eac52 |
| 33bec831c6 |
| 8a6b06383e |
| 60dd4bfa6f |
| 00f983079f |
| 170eed76fb |
| 1db2628af4 |
| 95636f96a8 |
.env.example (11 changed lines)

```diff
@@ -1,4 +1,4 @@
-VERSION=0.1.24
+VERSION=0.1.27

 # Nest run in docker, change host to database container name
 DB_HOST=localhost
@@ -41,10 +41,7 @@ NEO4J_USERNAME=neo4j
 NEO4J_PASSWORD=27192e6432564f4788d55c15131bd5ac
 OPENAI_API_KEY=
 MAGIC_LINK_SECRET=27192e6432564f4788d55c15131bd5ac
 NEO4J_AUTH=neo4j/27192e6432564f4788d55c15131bd5ac
 OLLAMA_URL=http://ollama:11434
@@ -56,7 +53,5 @@ AWS_ACCESS_KEY_ID=
 AWS_SECRET_ACCESS_KEY=
 AWS_REGION=us-east-1

-## Trigger ##
-TRIGGER_PROJECT_ID=
-TRIGGER_SECRET_KEY=
-TRIGGER_API_URL=http://host.docker.internal:8030
+QUEUE_PROVIDER=bullmq
```
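The Trigger.dev variables are replaced by a single `QUEUE_PROVIDER` flag. Below is a minimal sketch of how such a flag could gate queue selection at runtime; the `getQueueProvider` helper and its return values are assumptions for illustration, not code from this diff:

```ts
// Hypothetical helper: picks the background-job backend from the environment.
type QueueProvider = "bullmq" | "trigger";

function getQueueProvider(): QueueProvider {
  // Defaults to BullMQ, matching the new .env.example value.
  return (process.env.QUEUE_PROVIDER as QueueProvider) ?? "bullmq";
}

if (getQueueProvider() === "bullmq") {
  // e.g. enqueue work via the BullMQ queues added under apps/webapp/app/bullmq/
}
```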
.github/workflows/build-docker-image.yml (26 changed lines)

```diff
@@ -7,32 +7,6 @@ on:
   workflow_dispatch:

 jobs:
-  build-init:
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v2
-        with:
-          ref: main
-
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v1
-
-      - name: Login to Docker Registry
-        run: echo "${{ secrets.DOCKER_PASSWORD }}" | docker login -u "${{ secrets.DOCKER_USERNAME }}" --password-stdin
-
-      - name: Build and Push Frontend Docker Image
-        uses: docker/build-push-action@v2
-        with:
-          context: .
-          file: ./apps/init/Dockerfile
-          platforms: linux/amd64,linux/arm64
-          push: true
-          tags: redplanethq/init:${{ github.ref_name }}
-
   build-webapp:
     runs-on: ubuntu-latest
```
.gitignore (17 changed lines)

```diff
@@ -46,13 +46,14 @@ registry/
 .cursor
 CLAUDE.md
+AGENTS.md

 .claude
-.clinerules/byterover-rules.md
-.kilocode/rules/byterover-rules.md
-.roo/rules/byterover-rules.md
-.windsurf/rules/byterover-rules.md
-.cursor/rules/byterover-rules.mdc
-.kiro/steering/byterover-rules.md
-.qoder/rules/byterover-rules.md
-.augment/rules/byterover-rules.md
+.clinerules
+.kilocode
+.roo
+.windsurf
+.cursor
+.kiro
+.qoder
+.augment
```
.vscode/settings.json (deleted, 7 lines)

```diff
@@ -1,7 +0,0 @@
-{
-  "eslint.workingDirectories": [
-    {
-      "mode": "auto"
-    }
-  ]
-}
```
LICENSE (2 changed lines)

```diff
@@ -1,4 +1,4 @@
-Sol License
+Core License

 GNU AFFERO GENERAL PUBLIC LICENSE
 Version 3, 19 November 2007
```
README.md (50 changed lines)

````diff
@@ -55,7 +55,7 @@ CORE memory achieves **88.24%** average accuracy in Locomo dataset across all re
 ## Overview

 **Problem**

 Developers waste time re-explaining context to AI tools. Hit token limits in Claude? Start fresh and lose everything. Switch from ChatGPT/Claude to Cursor? Explain your context again. Your conversations, decisions, and insights vanish between sessions. With every new AI tool, the cost of context switching grows.
@@ -64,8 +64,13 @@ Developers waste time re-explaining context to AI tools. Hit token limits in Cla
 CORE is an open-source unified, persistent memory layer for all your AI tools. Your context follows you from Cursor to Claude to ChatGPT to Claude Code. One knowledge graph remembers who said what, when, and why. Connect once, remember everywhere. Stop managing context and start building.

 ## 🚀 CORE Self-Hosting

 Want to run CORE on your own infrastructure? Self-hosting gives you complete control over your data and deployment.

+**Quick Deploy Options:**
+
+[](https://railway.com/deploy/core?referralCode=LHvbIb&utm_medium=integration&utm_source=template&utm_campaign=generic)
+
 **Prerequisites**:

 - Docker (20.10.0+) and Docker Compose (2.20.0+) installed
@@ -76,15 +81,20 @@ Want to run CORE on your own infrastructure? Self-hosting gives you complete con
 ### Setup

 1. Clone the repository:

   ```
   git clone https://github.com/RedPlanetHQ/core.git
   cd core
   ```

 2. Configure environment variables in `core/.env`:

   ```
   OPENAI_API_KEY=your_openai_api_key
   ```

 3. Start the service

   ```
   docker-compose up -d
   ```
@@ -96,6 +106,7 @@ Once deployed, you can configure your AI providers (OpenAI, Anthropic) and start
 Note: We tried open-source models like Ollama or GPT OSS but facts generation were not good, we are still figuring out how to improve on that and then will also support OSS models.

 ## 🚀 CORE Cloud
@@ -111,24 +122,24 @@ Don't want to manage infrastructure? CORE Cloud lets you build your personal mem
 ## 🧩 Key Features

 ### 🧠 **Unified, Portable Memory**:

 Add and recall your memory across **Cursor, Windsurf, Claude Desktop, Claude Code, Gemini CLI, AWS's Kiro, VS Code, and Roo Code** via MCP

 

+### 🕸️ **Temporal + Reified Knowledge Graph**:
-### 🕸️ **Temporal + Reified Knowledge Graph**:

 Remember the story behind every fact—track who said what, when, and why with rich relationships and full provenance, not just flat storage

 

+### 🌐 **Browser Extension**:
-### 🌐 **Browser Extension**:

 Save conversations and content from ChatGPT, Grok, Gemini, Twitter, YouTube, blog posts, and any webpage directly into your CORE memory.

 **How to Use Extension**

 1. [Download the Extension](https://chromewebstore.google.com/detail/core-extension/cglndoindnhdbfcbijikibfjoholdjcc) from the Chrome Web Store.
 2. Login to [CORE dashboard](https://core.heysol.ai)
    - Navigate to Settings (bottom left)
@@ -137,13 +148,12 @@ Save conversations and content from ChatGPT, Grok, Gemini, Twitter, YouTube, blo
 https://github.com/user-attachments/assets/6e629834-1b9d-4fe6-ae58-a9068986036a

+### 💬 **Chat with Memory**:
-### 💬 **Chat with Memory**:
 Ask questions like "What are my writing preferences?" with instant insights from your connected knowledge

 

 ### ⚡ **Auto-Sync from Apps**:

 Automatically capture relevant context from Linear, Slack, Notion, GitHub and other connected apps into your CORE memory
@@ -152,16 +162,12 @@ Automatically capture relevant context from Linear, Slack, Notion, GitHub and ot
 

+### 🔗 **MCP Integration Hub**:
-### 🔗 **MCP Integration Hub**:

 Connect Linear, Slack, GitHub, Notion once to CORE—then use all their tools in Claude, Cursor, or any MCP client with a single URL

 

 ## How CORE create memory

 <img width="12885" height="3048" alt="memory-ingest-diagram" src="https://github.com/user-attachments/assets/c51679de-8260-4bee-bebf-aff32c6b8e13" />
@@ -175,7 +181,6 @@ CORE’s ingestion pipeline has four phases designed to capture evolving context
 The Result: Instead of a flat database, CORE gives you a memory that grows and changes with you - preserving context, evolution, and ownership so agents can actually use it.

 

 ## How CORE recalls from memory
@@ -200,7 +205,7 @@ Explore our documentation to get the most out of CORE
 - [Connect Core MCP with Claude](https://docs.heysol.ai/providers/claude)
 - [Connect Core MCP with Cursor](https://docs.heysol.ai/providers/cursor)
 - [Connect Core MCP with Claude Code](https://docs.heysol.ai/providers/claude-code)
 - [Connect Core MCP with Codex](https://docs.heysol.ai/providers/codex)

 - [Basic Concepts](https://docs.heysol.ai/overview)
 - [API Reference](https://docs.heysol.ai/api-reference/get-user-profile)
@@ -245,16 +250,11 @@ Have questions or feedback? We're here to help:
 <a href="https://github.com/RedPlanetHQ/core/graphs/contributors">
   <img src="https://contrib.rocks/image?repo=RedPlanetHQ/core" />
 </a>
+<<<<<<< Updated upstream
+<<<<<<< HEAD
+# =======
+>>>>>>> Stashed changes
+>>>>>>> 62db6c1 (feat: automatic space identification)
````
apps/init/.gitignore (deleted, 51 lines)

```gitignore
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# Dependencies
node_modules
.pnp
.pnp.js

# Local env files
.env
.env.local
.env.development.local
.env.test.local
.env.production.local

# Testing
coverage

# Turbo
.turbo

# Vercel
.vercel

# Build Outputs
.next/
out/
build
dist
.tshy/
.tshy-build/

# Debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# Misc
.DS_Store
*.pem

docker-compose.dev.yaml

clickhouse/
.vscode/
registry/

.cursor
CLAUDE.md

.claude
```
apps/init/Dockerfile (deleted, 70 lines)

```dockerfile
ARG NODE_IMAGE=node:20.11.1-bullseye-slim@sha256:5a5a92b3a8d392691c983719dbdc65d9f30085d6dcd65376e7a32e6fe9bf4cbe

FROM ${NODE_IMAGE} AS pruner
WORKDIR /core
COPY --chown=node:node . .
RUN npx -q turbo@2.5.3 prune --scope=@redplanethq/init --docker
RUN find . -name "node_modules" -type d -prune -exec rm -rf '{}' +

# Base strategy to have layer caching
FROM ${NODE_IMAGE} AS base
RUN apt-get update && apt-get install -y openssl dumb-init postgresql-client
WORKDIR /core
COPY --chown=node:node .gitignore .gitignore
COPY --from=pruner --chown=node:node /core/out/json/ .
COPY --from=pruner --chown=node:node /core/out/pnpm-lock.yaml ./pnpm-lock.yaml
COPY --from=pruner --chown=node:node /core/out/pnpm-workspace.yaml ./pnpm-workspace.yaml

## Dev deps
FROM base AS dev-deps
WORKDIR /core
# Corepack is used to install pnpm
RUN corepack enable
ENV NODE_ENV development
RUN pnpm install --ignore-scripts --no-frozen-lockfile

## Production deps
FROM base AS production-deps
WORKDIR /core
# Corepack is used to install pnpm
RUN corepack enable
ENV NODE_ENV production
RUN pnpm install --prod --no-frozen-lockfile

## Builder (builds the init CLI)
FROM base AS builder
WORKDIR /core
# Corepack is used to install pnpm
RUN corepack enable
COPY --from=pruner --chown=node:node /core/out/full/ .
COPY --from=dev-deps --chown=node:node /core/ .
COPY --chown=node:node turbo.json turbo.json
COPY --chown=node:node .configs/tsconfig.base.json .configs/tsconfig.base.json
RUN pnpm run build --filter=@redplanethq/init...

# Runner
FROM ${NODE_IMAGE} AS runner
RUN apt-get update && apt-get install -y openssl postgresql-client ca-certificates
WORKDIR /core
RUN corepack enable
ENV NODE_ENV production
COPY --from=base /usr/bin/dumb-init /usr/bin/dumb-init
COPY --from=pruner --chown=node:node /core/out/full/ .
COPY --from=production-deps --chown=node:node /core .
COPY --from=builder --chown=node:node /core/apps/init/dist ./apps/init/dist

# Copy the trigger dump file
COPY --chown=node:node apps/init/trigger.dump ./apps/init/trigger.dump

# Copy and set up entrypoint script
COPY --chown=node:node apps/init/entrypoint.sh ./apps/init/entrypoint.sh
RUN chmod +x ./apps/init/entrypoint.sh

USER node
WORKDIR /core/apps/init
ENTRYPOINT ["dumb-init", "--"]
CMD ["./entrypoint.sh"]
```
apps/init/README.md (deleted, 197 lines)

````markdown
# Core CLI

🧠 **CORE - Contextual Observation & Recall Engine**

A Command-Line Interface for setting up and managing the Core development environment.

## Installation
```bash
npm install -g @redplanethq/core
```

## Commands
### `core init`
**One-time setup command** - Initializes the Core development environment with full configuration.
### `core start`
**Daily usage command** - Starts all Core services (Docker containers).
### `core stop`
**Daily usage command** - Stops all Core services (Docker containers).

## Getting Started
### Prerequisites
- **Node.js** (v18.20.0 or higher)
- **Docker** and **Docker Compose**
- **Git**
- **pnpm** package manager

### Initial Setup
1. **Clone the Core repository:**
   ```bash
   git clone https://github.com/redplanethq/core.git
   cd core
   ```
2. **Run the initialization command:**
   ```bash
   core init
   ```
3. **The CLI will guide you through the complete setup process:**

#### Step 1: Prerequisites Check
- The CLI shows a checklist of required tools
- Confirms you're in the Core repository directory
- Exits with instructions if prerequisites aren't met

#### Step 2: Environment Configuration
- Copies `.env.example` to `.env` in the root directory
- Copies `trigger/.env.example` to `trigger/.env`
- Skips copying if `.env` files already exist

#### Step 3: Docker Services Startup
- Starts main Core services: `docker compose up -d`
- Starts Trigger.dev services: `docker compose up -d` (in trigger/ directory)
- Shows real-time output with progress indicators

#### Step 4: Database Health Check
- Verifies PostgreSQL is running on `localhost:5432`
- Retries for up to 60 seconds if needed

#### Step 5: Trigger.dev Setup (Interactive)
- **If Trigger.dev is not configured:**
  1. Prompts you to open http://localhost:8030
  2. Asks you to login to Trigger.dev
  3. Guides you to create an organization and project
  4. Collects your Project ID and Secret Key
  5. Updates `.env` with your Trigger.dev configuration
  6. Restarts Core services with new configuration
- **If Trigger.dev is already configured:**
  - Skips setup and shows "Configuration already exists" message

#### Step 6: Docker Registry Login
- Displays docker login command with credentials from `.env`
- Waits for you to complete the login process

#### Step 7: Trigger.dev Task Deployment
- Automatically runs: `npx trigger.dev@v4-beta login -a http://localhost:8030`
- Deploys tasks with: `pnpm trigger:deploy`
- Shows manual deployment instructions if automatic deployment fails

#### Step 8: Setup Complete!
- Confirms all services are running
- Shows service URLs and connection information

## Daily Usage
After initial setup, use these commands for daily development:
### Start Services
```bash
core start
```
Starts all Docker containers for Core development.
### Stop Services
```bash
core stop
```
Stops all Docker containers.

## Service URLs
After setup, these services will be available:
- **Core Application**: http://localhost:3033
- **Trigger.dev**: http://localhost:8030
- **PostgreSQL**: localhost:5432

## Troubleshooting
### Repository Not Found
If you run commands outside the Core repository:
- The CLI will ask you to confirm you're in the Core repository
- If not, it provides instructions to clone the repository
- Navigate to the Core repository directory before running commands again
### Docker Issues
- Ensure Docker is running
- Check Docker Compose is installed
- Verify you have sufficient system resources
### Trigger.dev Setup Issues
- Check container logs: `docker logs trigger-webapp --tail 50`
- Ensure you can access http://localhost:8030
- Verify your network allows connections to localhost
### Environment Variables
The CLI automatically manages these environment variables:
- `TRIGGER_PROJECT_ID` - Your Trigger.dev project ID
- `TRIGGER_SECRET_KEY` - Your Trigger.dev secret key
- Docker registry credentials for deployment
### Manual Trigger.dev Deployment
If automatic deployment fails, run manually:
```bash
npx trigger.dev@v4-beta login -a http://localhost:8030
pnpm trigger:deploy
```

## Development Workflow
1. **First time setup:** `core init`
2. **Daily development:**
   - `core start` - Start your development environment
   - Do your development work
   - `core stop` - Stop services when done

## Support
For issues and questions:
- Check the main Core repository: https://github.com/redplanethq/core
- Review Docker container logs for troubleshooting
- Ensure all prerequisites are properly installed

## Features
- 🚀 **One-command setup** - Complete environment initialization
- 🔄 **Smart configuration** - Skips already configured components
- 📱 **Real-time feedback** - Live progress indicators and output
- 🐳 **Docker integration** - Full container lifecycle management
- 🔧 **Interactive setup** - Guided configuration process
- 🎯 **Error handling** - Graceful failure with recovery instructions

---

**Happy coding with Core!** 🎉
````
apps/init/entrypoint.sh (deleted, 22 lines)

```sh
#!/bin/sh

# Exit on any error
set -e

echo "Starting init CLI..."

# Wait for database to be ready
echo "Waiting for database connection..."
until pg_isready -h "${DB_HOST:-localhost}" -p "${DB_PORT:-5432}" -U "${POSTGRES_USER:-docker}"; do
  echo "Database is unavailable - sleeping"
  sleep 2
done

echo "Database is ready!"

# Run the init command
echo "Running init command..."
node ./dist/esm/index.js init

echo "Init completed successfully!"
exit 0
```
apps/init/package.json (deleted, 145 lines)

```json
{
  "name": "@redplanethq/init",
  "version": "0.1.0",
  "description": "A init service to create trigger instance",
  "type": "module",
  "license": "MIT",
  "repository": { "type": "git", "url": "https://github.com/redplanethq/core", "directory": "apps/init" },
  "publishConfig": { "access": "public" },
  "keywords": ["typescript"],
  "files": ["dist", "trigger.dump"],
  "bin": { "core": "./dist/esm/index.js" },
  "tshy": {
    "selfLink": false, "main": false, "module": false,
    "dialects": ["esm"],
    "project": "./tsconfig.json",
    "exclude": ["**/*.test.ts"],
    "exports": { "./package.json": "./package.json", ".": "./src/index.ts" }
  },
  "devDependencies": {
    "@epic-web/test-server": "^0.1.0", "@types/gradient-string": "^1.1.2", "@types/ini": "^4.1.1",
    "@types/object-hash": "3.0.6", "@types/polka": "^0.5.7", "@types/react": "^18.2.48",
    "@types/resolve": "^1.20.6", "@types/rimraf": "^4.0.5", "@types/semver": "^7.5.0",
    "@types/source-map-support": "0.5.10", "@types/ws": "^8.5.3", "cpy-cli": "^5.0.0",
    "execa": "^8.0.1", "find-up": "^7.0.0", "rimraf": "^5.0.7", "ts-essentials": "10.0.1",
    "tshy": "^3.0.2", "tsx": "4.17.0"
  },
  "scripts": {
    "clean": "rimraf dist .tshy .tshy-build .turbo",
    "typecheck": "tsc -p tsconfig.src.json --noEmit",
    "build": "tshy",
    "test": "vitest",
    "test:e2e": "vitest --run -c ./e2e/vitest.config.ts"
  },
  "dependencies": {
    "@clack/prompts": "^0.10.0", "@depot/cli": "0.0.1-cli.2.80.0", "@opentelemetry/api": "1.9.0",
    "@opentelemetry/api-logs": "0.52.1", "@opentelemetry/exporter-logs-otlp-http": "0.52.1",
    "@opentelemetry/exporter-trace-otlp-http": "0.52.1", "@opentelemetry/instrumentation": "0.52.1",
    "@opentelemetry/instrumentation-fetch": "0.52.1", "@opentelemetry/resources": "1.25.1",
    "@opentelemetry/sdk-logs": "0.52.1", "@opentelemetry/sdk-node": "0.52.1",
    "@opentelemetry/sdk-trace-base": "1.25.1", "@opentelemetry/sdk-trace-node": "1.25.1",
    "@opentelemetry/semantic-conventions": "1.25.1", "ansi-escapes": "^7.0.0", "braces": "^3.0.3",
    "c12": "^1.11.1", "chalk": "^5.2.0", "chokidar": "^3.6.0", "cli-table3": "^0.6.3",
    "commander": "^9.4.1", "defu": "^6.1.4", "dotenv": "^16.4.5", "dotenv-expand": "^12.0.2",
    "esbuild": "^0.23.0", "eventsource": "^3.0.2", "evt": "^2.4.13", "fast-npm-meta": "^0.2.2",
    "git-last-commit": "^1.0.1", "gradient-string": "^2.0.2", "has-flag": "^5.0.1",
    "import-in-the-middle": "1.11.0", "import-meta-resolve": "^4.1.0", "ini": "^5.0.0",
    "jsonc-parser": "3.2.1", "magicast": "^0.3.4", "minimatch": "^10.0.1", "mlly": "^1.7.1",
    "nypm": "^0.5.4", "nanoid": "3.3.8", "object-hash": "^3.0.0", "open": "^10.0.3", "knex": "3.1.0",
    "p-limit": "^6.2.0", "p-retry": "^6.1.0", "partysocket": "^1.0.2", "pkg-types": "^1.1.3",
    "polka": "^0.5.2", "pg": "8.16.3", "resolve": "^1.22.8", "semver": "^7.5.0",
    "signal-exit": "^4.1.0", "source-map-support": "0.5.21", "std-env": "^3.7.0",
    "supports-color": "^10.0.0", "tiny-invariant": "^1.2.0", "tinyexec": "^0.3.1",
    "tinyglobby": "^0.2.10", "uuid": "11.1.0", "ws": "^8.18.0", "xdg-app-paths": "^8.3.0",
    "zod": "3.23.8", "zod-validation-error": "^1.5.0"
  },
  "engines": { "node": ">=18.20.0" },
  "exports": {
    "./package.json": "./package.json",
    ".": { "import": { "types": "./dist/esm/index.d.ts", "default": "./dist/esm/index.js" } }
  }
}
```
apps/init/src/cli/index.ts (deleted, 14 lines)

```ts
import { Command } from "commander";
import { initCommand } from "../commands/init.js";
import { VERSION } from "./version.js";

const program = new Command();

program.name("core").description("Core CLI - A Command-Line Interface for Core").version(VERSION);

program
  .command("init")
  .description("Initialize Core development environment (run once)")
  .action(initCommand);

program.parse(process.argv);
```
apps/init/src/cli/version.ts (deleted, 3 lines)

```ts
import { env } from "../utils/env.js";

export const VERSION = env.VERSION;
```
apps/init/src/commands/init.ts (deleted, 36 lines)

```ts
import { intro, outro, note } from "@clack/prompts";
import { printCoreBrainLogo } from "../utils/ascii.js";
import { initTriggerDatabase, updateWorkerImage } from "../utils/trigger.js";

export async function initCommand() {
  // Display the CORE brain logo
  printCoreBrainLogo();

  intro("🚀 Core Development Environment Setup");

  try {
    await initTriggerDatabase();
    await updateWorkerImage();

    note(
      [
        "Your services will start running:",
        "",
        "• Core Application: http://localhost:3033",
        "• Trigger.dev: http://localhost:8030",
        "• PostgreSQL: localhost:5432",
        "",
        "You can now start developing with Core!",
        "",
        "ℹ️ When logging in to the Core Application, you can find the login URL in the Docker container logs:",
        "   docker logs core-app --tail 50",
      ].join("\n"),
      "🚀 Services Running"
    );
    outro("🎉 Setup Complete!");
    process.exit(0);
  } catch (error: any) {
    outro(`❌ Setup failed: ${error.message}`);
    process.exit(1);
  }
}
```
apps/init/src/index.ts (deleted, 3 lines)

```ts
#!/usr/bin/env node

import "./cli/index.js";
```
apps/init/src/utils/ascii.ts (deleted, 29 lines; ASCII-art spacing is approximate)

```ts
import chalk from "chalk";
import { VERSION } from "../cli/version.js";

export function printCoreBrainLogo(): void {
  const brain = `
 ██████╗ ██████╗ ██████╗ ███████╗
██╔════╝██╔═══██╗██╔══██╗██╔════╝
██║     ██║   ██║██████╔╝█████╗
██║     ██║   ██║██╔══██╗██╔══╝
╚██████╗╚██████╔╝██║  ██║███████╗
 ╚═════╝ ╚═════╝ ╚═╝  ╚═╝╚══════╝

      o   o   o
    o  o---o---o  o
  o---o   o   o---o---o
 o   o---o---o---o   o
  o---o   o   o---o---o
    o  o---o---o  o
      o   o   o
`;

  console.log(chalk.cyan(brain));
  console.log(
    chalk.bold.white(
      ` 🧠 CORE - Contextual Observation & Recall Engine ${VERSION ? chalk.gray(`(${VERSION})`) : ""}\n`
    )
  );
}
```
apps/init/src/utils/env.ts (deleted, 24 lines)

```ts
import { z } from "zod";

const EnvironmentSchema = z.object({
  // Version
  VERSION: z.string().default("0.1.24"),

  // Database
  DB_HOST: z.string().default("localhost"),
  DB_PORT: z.string().default("5432"),
  TRIGGER_DB: z.string().default("trigger"),
  POSTGRES_USER: z.string().default("docker"),
  POSTGRES_PASSWORD: z.string().default("docker"),

  // Trigger database
  TRIGGER_TASKS_IMAGE: z.string().default("redplanethq/proj_core:latest"),

  // Node environment
  NODE_ENV: z
    .union([z.literal("development"), z.literal("production"), z.literal("test")])
    .default("development"),
});

export type Environment = z.infer<typeof EnvironmentSchema>;
export const env = EnvironmentSchema.parse(process.env);
```
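As a quick illustration of how this schema behaves, parsing an empty environment falls back to the declared defaults, while an unexpected NODE_ENV value fails validation; a minimal sketch using a trimmed copy of the schema above:

```ts
import { z } from "zod";

const EnvironmentSchema = z.object({
  VERSION: z.string().default("0.1.24"),
  NODE_ENV: z
    .union([z.literal("development"), z.literal("production"), z.literal("test")])
    .default("development"),
});

// With nothing set, the defaults apply.
console.log(EnvironmentSchema.parse({})); // { VERSION: "0.1.24", NODE_ENV: "development" }

// An unexpected NODE_ENV value throws a ZodError.
// EnvironmentSchema.parse({ NODE_ENV: "staging" });
```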
apps/init/src/utils/trigger.ts (deleted, 182 lines)

```ts
import Knex from "knex";
import path from "path";
import { fileURLToPath } from "url";
import { env } from "./env.js";
import { spinner, note, log } from "@clack/prompts";

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

/**
 * Returns a PostgreSQL database URL for the given database name.
 * Throws if required environment variables are missing.
 */
export function getDatabaseUrl(dbName: string): string {
  const { POSTGRES_USER, POSTGRES_PASSWORD, DB_HOST, DB_PORT } = env;

  if (!POSTGRES_USER || !POSTGRES_PASSWORD || !DB_HOST || !DB_PORT || !dbName) {
    throw new Error(
      "One or more required environment variables are missing: POSTGRES_USER, POSTGRES_PASSWORD, DB_HOST, DB_PORT, dbName"
    );
  }

  return `postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${DB_HOST}:${DB_PORT}/${dbName}`;
}

/**
 * Checks if the database specified by TRIGGER_DB exists, and creates it if it does not.
 * Returns { exists, created } - exists indicates success, created indicates if database was newly created.
 */
export async function ensureDatabaseExists(): Promise<{ exists: boolean; created: boolean }> {
  const { TRIGGER_DB } = env;
  if (!TRIGGER_DB) {
    throw new Error("TRIGGER_DB environment variable is missing");
  }

  // Build a connection string to the default 'postgres' database
  const adminDbUrl = getDatabaseUrl("postgres");

  // Create a Knex instance for the admin connection
  const adminKnex = Knex({ client: "pg", connection: adminDbUrl });

  const s = spinner();
  s.start("Checking for Trigger.dev database...");

  try {
    // Check if the database exists
    const result = await adminKnex.select(1).from("pg_database").where("datname", TRIGGER_DB);

    if (result.length === 0) {
      s.message("Database not found. Creating...");
      // Database does not exist, create it
      await adminKnex.raw(`CREATE DATABASE "${TRIGGER_DB}"`);
      s.stop("Database created.");
      return { exists: true, created: true };
    } else {
      s.stop("Database exists.");
      return { exists: true, created: false };
    }
  } catch (err) {
    s.stop("Failed to ensure database exists.");
    log.warning("Failed to ensure database exists: " + (err as Error).message);
    return { exists: false, created: false };
  } finally {
    await adminKnex.destroy();
  }
}

// Main initialization function
export async function initTriggerDatabase() {
  const { TRIGGER_DB } = env;
  if (!TRIGGER_DB) {
    throw new Error("TRIGGER_DB environment variable is missing");
  }

  // Ensure the database exists
  const { exists, created } = await ensureDatabaseExists();
  if (!exists) {
    throw new Error("Failed to create or verify database exists");
  }

  // Only run pg_restore if the database was newly created
  if (!created) {
    note("Database already exists, skipping restore from trigger.dump");
    return;
  }

  // Run pg_restore with the trigger.dump file
  const dumpFilePath = path.join(__dirname, "../../../trigger.dump");
  const connectionString = getDatabaseUrl(TRIGGER_DB);

  const s = spinner();
  s.start("Restoring database from trigger.dump...");

  try {
    // Spawn pg_restore and stream stdout/stderr into the spinner
    const { spawn } = await import("child_process");
    await new Promise<void>((resolve, reject) => {
      const child = spawn(
        "pg_restore",
        ["--verbose", "--no-acl", "--no-owner", "-d", connectionString, dumpFilePath],
        { stdio: ["ignore", "pipe", "pipe"] }
      );

      child.stdout.on("data", (data) => s.message(data.toString()));
      child.stderr.on("data", (data) => s.message(data.toString()));

      child.on("close", (code) => {
        if (code === 0) {
          s.stop("Database restored successfully from trigger.dump");
          resolve();
        } else {
          s.stop("Failed to restore database.");
          log.warning(`Failed to restore database: pg_restore exited with code ${code}`);
          reject(new Error(`Database restore failed: pg_restore exited with code ${code}`));
        }
      });

      child.on("error", (err) => {
        s.stop("Failed to restore database.");
        log.warning("Failed to restore database: " + err.message);
        reject(new Error(`Database restore failed: ${err.message}`));
      });
    });
  } catch (error: any) {
    s.stop("Failed to restore database.");
    log.warning("Failed to restore database: " + error.message);
    throw new Error(`Database restore failed: ${error.message}`);
  }
}

export async function updateWorkerImage() {
  const { TRIGGER_DB, TRIGGER_TASKS_IMAGE } = env;
  if (!TRIGGER_DB) {
    throw new Error("TRIGGER_DB environment variable is missing");
  }

  const connectionString = getDatabaseUrl(TRIGGER_DB);
  const knex = Knex({ client: "pg", connection: connectionString });

  const s = spinner();
  s.start("Updating worker image reference...");

  try {
    // Get the first record from WorkerDeployment table
    const firstWorkerDeployment = await knex("WorkerDeployment").select("id").first();

    if (!firstWorkerDeployment) {
      s.stop("No WorkerDeployment records found, skipping image update");
      note("No WorkerDeployment records found, skipping image update");
      return;
    }

    // Update the imageReference column with the TRIGGER_TASKS_IMAGE value
    await knex("WorkerDeployment").where("id", firstWorkerDeployment.id).update({
      imageReference: TRIGGER_TASKS_IMAGE,
      updatedAt: new Date(),
    });

    s.stop(`Successfully updated worker image reference to: ${TRIGGER_TASKS_IMAGE}`);
  } catch (error: any) {
    s.stop("Failed to update worker image.");
    log.warning("Failed to update worker image: " + error.message);
    throw new Error(`Worker image update failed: ${error.message}`);
  } finally {
    await knex.destroy();
  }
}
```
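For reference, the URL built by getDatabaseUrl and the order of the init steps look like this; a sketch using the defaults declared in env.ts, the actual values come from the caller's environment:

```ts
// With the env.ts defaults (docker/docker@localhost:5432, TRIGGER_DB=trigger):
// getDatabaseUrl("trigger") === "postgresql://docker:docker@localhost:5432/trigger"

async function runInit() {
  await initTriggerDatabase(); // create the trigger DB if missing, restore trigger.dump on first run
  await updateWorkerImage();   // point the restored WorkerDeployment row at TRIGGER_TASKS_IMAGE
}
```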
apps/init/trigger.dump (deleted) — Binary file not shown.
apps/init tsconfig (deleted, 40 lines)

```json
{
  "include": ["./src/**/*.ts"],
  "exclude": ["./src/**/*.test.ts"],
  "compilerOptions": {
    "target": "es2022",
    "lib": ["ES2022", "DOM", "DOM.Iterable", "DOM.AsyncIterable"],
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "moduleDetection": "force",
    "verbatimModuleSyntax": false,
    "jsx": "react",
    "strict": true,
    "alwaysStrict": true,
    "strictPropertyInitialization": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true,
    "noUnusedLocals": false,
    "noUnusedParameters": false,
    "noImplicitAny": true,
    "noImplicitReturns": true,
    "noImplicitThis": true,
    "noFallthroughCasesInSwitch": true,
    "resolveJsonModule": true,
    "removeComments": false,
    "esModuleInterop": true,
    "emitDecoratorMetadata": false,
    "experimentalDecorators": false,
    "downlevelIteration": true,
    "isolatedModules": true,
    "noUncheckedIndexedAccess": true,
    "pretty": true,
    "isolatedDeclarations": false,
    "composite": true,
    "sourceMap": true
  }
}
```
apps/init vitest config (deleted, 8 lines)

```ts
import { configDefaults, defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    globals: true,
    exclude: [...configDefaults.exclude, "e2e/**/*"],
  },
});
```
apps/webapp/app/bullmq/connection.ts (new file, 50 lines)

```ts
import Redis, { type RedisOptions } from "ioredis";

let redisConnection: Redis | null = null;

/**
 * Get or create a Redis connection for BullMQ
 * This connection is shared across all queues and workers
 */
export function getRedisConnection() {
  if (redisConnection) {
    return redisConnection;
  }

  const redisConfig: RedisOptions = {
    host: process.env.REDIS_HOST,
    port: parseInt(process.env.REDIS_PORT as string),
    password: process.env.REDIS_PASSWORD,
    maxRetriesPerRequest: null, // Required for BullMQ
    enableReadyCheck: false, // Required for BullMQ
  };

  // Add TLS configuration if not disabled
  if (!process.env.REDIS_TLS_DISABLED) {
    redisConfig.tls = {};
  }

  redisConnection = new Redis(redisConfig);

  redisConnection.on("error", (error) => {
    console.error("Redis connection error:", error);
  });

  redisConnection.on("connect", () => {
    console.log("Redis connected successfully");
  });

  return redisConnection;
}

/**
 * Close the Redis connection (useful for graceful shutdown)
 */
export async function closeRedisConnection(): Promise<void> {
  if (redisConnection) {
    await redisConnection.quit();
    redisConnection = null;
  }
}
```
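A short usage sketch of the shared connection with a BullMQ queue and worker; the "example-queue" name and job payload are placeholders, not part of this change:

```ts
import { Queue, Worker } from "bullmq";
import { getRedisConnection, closeRedisConnection } from "./connection";

// Queues and workers reuse the same ioredis instance.
const exampleQueue = new Queue("example-queue", { connection: getRedisConnection() });

const exampleWorker = new Worker(
  "example-queue",
  async (job) => {
    console.log("processing", job.id, job.data);
  },
  { connection: getRedisConnection() },
);

// On shutdown, close workers first, then the shared connection.
async function shutdown() {
  await exampleWorker.close();
  await closeRedisConnection();
}
```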
apps/webapp/app/bullmq/queues/index.ts (new file, 160 lines)

```ts
/**
 * BullMQ Queues
 *
 * All queue definitions for the BullMQ implementation
 */

import { Queue } from "bullmq";
import { getRedisConnection } from "../connection";

/**
 * Episode ingestion queue
 * Handles individual episode ingestion (including document chunks)
 */
export const ingestQueue = new Queue("ingest-queue", {
  connection: getRedisConnection(),
  defaultJobOptions: {
    attempts: 3,
    backoff: { type: "exponential", delay: 2000 },
    removeOnComplete: { age: 3600, count: 1000 }, // Keep completed jobs for 1 hour, last 1000
    removeOnFail: { age: 86400 }, // Keep failed jobs for 24 hours
  },
});

/**
 * Document ingestion queue
 * Handles document-level ingestion with differential processing
 */
export const documentIngestQueue = new Queue("document-ingest-queue", {
  connection: getRedisConnection(),
  defaultJobOptions: {
    attempts: 3,
    backoff: { type: "exponential", delay: 2000 },
    removeOnComplete: { age: 3600, count: 1000 },
    removeOnFail: { age: 86400 },
  },
});

/**
 * Conversation title creation queue
 */
export const conversationTitleQueue = new Queue("conversation-title-queue", {
  connection: getRedisConnection(),
  defaultJobOptions: {
    attempts: 3,
    backoff: { type: "exponential", delay: 2000 },
    removeOnComplete: { age: 3600, count: 1000 },
    removeOnFail: { age: 86400 },
  },
});

/**
 * Session compaction queue
 */
export const sessionCompactionQueue = new Queue("session-compaction-queue", {
  connection: getRedisConnection(),
  defaultJobOptions: {
    attempts: 3,
    backoff: { type: "exponential", delay: 2000 },
    removeOnComplete: { age: 3600, count: 1000 },
    removeOnFail: { age: 86400 },
  },
});

/**
 * BERT topic analysis queue
 * Handles CPU-intensive topic modeling on user episodes
 */
export const bertTopicQueue = new Queue("bert-topic-queue", {
  connection: getRedisConnection(),
  defaultJobOptions: {
    attempts: 2, // Only 2 attempts due to long runtime
    backoff: { type: "exponential", delay: 5000 },
    removeOnComplete: { age: 7200, count: 100 }, // Keep completed jobs for 2 hours
    removeOnFail: { age: 172800 }, // Keep failed jobs for 48 hours (for debugging)
  },
});

/**
 * Space assignment queue
 * Handles assigning episodes to spaces based on semantic matching
 */
export const spaceAssignmentQueue = new Queue("space-assignment-queue", {
  connection: getRedisConnection(),
  defaultJobOptions: {
    attempts: 3,
    backoff: { type: "exponential", delay: 2000 },
    removeOnComplete: { age: 3600, count: 1000 },
    removeOnFail: { age: 86400 },
  },
});

/**
 * Space summary queue
 * Handles generating summaries for spaces
 */
export const spaceSummaryQueue = new Queue("space-summary-queue", {
  connection: getRedisConnection(),
  defaultJobOptions: {
    attempts: 3,
    backoff: { type: "exponential", delay: 2000 },
    removeOnComplete: { age: 3600, count: 1000 },
    removeOnFail: { age: 86400 },
  },
});
```
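Enqueuing onto these queues uses the standard BullMQ add() call. The payload shape below (userId/workspaceId/queueId) mirrors the fields the job-finder utility later inspects, but is otherwise an assumption:

```ts
import { ingestQueue } from "./queues";

// Add an episode-ingestion job; the per-queue defaultJobOptions
// (retries, backoff, completed/failed retention) apply automatically.
await ingestQueue.add("ingest-episode", {
  userId: "user_123",
  workspaceId: "ws_456",
  queueId: "q_789",
  episode: { title: "Example episode" },
});
```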
apps/webapp/app/bullmq/start-workers.ts (new file, 154 lines)

```ts
/**
 * BullMQ Worker Startup Script
 *
 * This script starts all BullMQ workers for processing background jobs.
 * Run this as a separate process alongside your main application.
 *
 * Usage:
 *   tsx apps/webapp/app/bullmq/start-workers.ts
 */

import { logger } from "~/services/logger.service";
import {
  ingestWorker,
  documentIngestWorker,
  conversationTitleWorker,
  sessionCompactionWorker,
  closeAllWorkers,
  bertTopicWorker,
  spaceAssignmentWorker,
  spaceSummaryWorker,
} from "./workers";
import {
  ingestQueue,
  documentIngestQueue,
  conversationTitleQueue,
  sessionCompactionQueue,
  bertTopicQueue,
  spaceAssignmentQueue,
  spaceSummaryQueue,
} from "./queues";
import { setupWorkerLogging, startPeriodicMetricsLogging } from "./utils/worker-logger";

let metricsInterval: NodeJS.Timeout | null = null;

/**
 * Initialize and start all BullMQ workers with comprehensive logging
 */
export async function initWorkers(): Promise<void> {
  // Setup comprehensive logging for all workers
  setupWorkerLogging(ingestWorker, ingestQueue, "ingest-episode");
  setupWorkerLogging(documentIngestWorker, documentIngestQueue, "ingest-document");
  setupWorkerLogging(conversationTitleWorker, conversationTitleQueue, "conversation-title");
  setupWorkerLogging(sessionCompactionWorker, sessionCompactionQueue, "session-compaction");
  setupWorkerLogging(bertTopicWorker, bertTopicQueue, "bert-topic");
  setupWorkerLogging(spaceAssignmentWorker, spaceAssignmentQueue, "space-assignment");
  setupWorkerLogging(spaceSummaryWorker, spaceSummaryQueue, "space-summary");

  // Start periodic metrics logging (every 60 seconds)
  metricsInterval = startPeriodicMetricsLogging(
    [
      { worker: ingestWorker, queue: ingestQueue, name: "ingest-episode" },
      { worker: documentIngestWorker, queue: documentIngestQueue, name: "ingest-document" },
      { worker: conversationTitleWorker, queue: conversationTitleQueue, name: "conversation-title" },
      { worker: sessionCompactionWorker, queue: sessionCompactionQueue, name: "session-compaction" },
      { worker: bertTopicWorker, queue: bertTopicQueue, name: "bert-topic" },
      { worker: spaceAssignmentWorker, queue: spaceAssignmentQueue, name: "space-assignment" },
      { worker: spaceSummaryWorker, queue: spaceAssignmentQueue, name: "space-summary" },
    ],
    60000, // Log metrics every 60 seconds
  );

  // Log worker startup
  logger.log("\n🚀 Starting BullMQ workers...");
  logger.log("─".repeat(80));
  logger.log(`✓ Ingest worker: ${ingestWorker.name} (concurrency: 5)`);
  logger.log(`✓ Document ingest worker: ${documentIngestWorker.name} (concurrency: 3)`);
  logger.log(`✓ Conversation title worker: ${conversationTitleWorker.name} (concurrency: 10)`);
  logger.log(`✓ Session compaction worker: ${sessionCompactionWorker.name} (concurrency: 3)`);
  logger.log("─".repeat(80));
  logger.log("✅ All BullMQ workers started and listening for jobs");
  logger.log("📊 Metrics will be logged every 60 seconds\n");
}

/**
 * Shutdown all workers gracefully
 */
export async function shutdownWorkers(): Promise<void> {
  logger.log("Shutdown signal received, closing workers gracefully...");
  if (metricsInterval) {
    clearInterval(metricsInterval);
  }
  await closeAllWorkers();
}

// If running as standalone script, initialize workers
if (import.meta.url === `file://${process.argv[1]}`) {
  initWorkers();

  // Handle graceful shutdown
  const shutdown = async () => {
    await shutdownWorkers();
    process.exit(0);
  };

  process.on("SIGTERM", shutdown);
  process.on("SIGINT", shutdown);
}
```
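The "./workers" module itself is not part of this diff. Under that assumption, a minimal sketch of what one of the imported workers could look like, with the concurrency value taken from the startup log above:

```ts
import { Worker } from "bullmq";
import { getRedisConnection } from "./connection";

// Hypothetical ingest worker matching the "ingest-queue" defined in queues/index.ts.
export const ingestWorker = new Worker(
  "ingest-queue",
  async (job) => {
    // ...process one episode-ingestion job...
    return { processed: true, jobId: job.id };
  },
  { connection: getRedisConnection(), concurrency: 5 },
);

// closeAllWorkers() presumably awaits close() on each worker before exit.
export async function closeAllWorkers() {
  await ingestWorker.close();
}
```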
132
apps/webapp/app/bullmq/utils/job-finder.ts
Normal file
132
apps/webapp/app/bullmq/utils/job-finder.ts
Normal file
@ -0,0 +1,132 @@
/**
 * BullMQ Job Finder Utilities
 *
 * Helper functions to find, retrieve, and cancel BullMQ jobs
 */

interface JobInfo {
  id: string;
  isCompleted: boolean;
  status?: string;
}

/**
 * Get all active queues
 */
async function getAllQueues() {
  const {
    ingestQueue,
    documentIngestQueue,
    conversationTitleQueue,
    sessionCompactionQueue,
  } = await import("../queues");

  return [
    ingestQueue,
    documentIngestQueue,
    conversationTitleQueue,
    sessionCompactionQueue,
  ];
}

/**
 * Find jobs by tags (metadata stored in job data)
 * Since BullMQ doesn't have native tag support like Trigger.dev,
 * we search through jobs and check if their data contains the required identifiers
 */
export async function getJobsByTags(
  tags: string[],
  taskIdentifier?: string,
): Promise<JobInfo[]> {
  const queues = await getAllQueues();
  const matchingJobs: JobInfo[] = [];

  for (const queue of queues) {
    // Skip if taskIdentifier is specified and doesn't match queue name
    if (taskIdentifier && !queue.name.includes(taskIdentifier)) {
      continue;
    }

    // Get all active and waiting jobs
    const [active, waiting, delayed] = await Promise.all([
      queue.getActive(),
      queue.getWaiting(),
      queue.getDelayed(),
    ]);

    const allJobs = [...active, ...waiting, ...delayed];

    for (const job of allJobs) {
      // Check if job data contains all required tags
      const jobData = job.data as any;
      const matchesTags = tags.every(
        (tag) =>
          job.id?.includes(tag) ||
          jobData.userId === tag ||
          jobData.workspaceId === tag ||
          jobData.queueId === tag,
      );

      if (matchesTags) {
        const state = await job.getState();
        matchingJobs.push({
          id: job.id!,
          isCompleted: state === "completed" || state === "failed",
          status: state,
        });
      }
    }
  }

  return matchingJobs;
}

/**
 * Get a specific job by ID across all queues
 */
export async function getJobById(jobId: string): Promise<JobInfo | null> {
  const queues = await getAllQueues();

  for (const queue of queues) {
    try {
      const job = await queue.getJob(jobId);
      if (job) {
        const state = await job.getState();
        return {
          id: job.id!,
          isCompleted: state === "completed" || state === "failed",
          status: state,
        };
      }
    } catch {
      // Job not in this queue, continue
      continue;
    }
  }

  return null;
}

/**
 * Cancel a job by ID
 */
export async function cancelJobById(jobId: string): Promise<void> {
  const queues = await getAllQueues();

  for (const queue of queues) {
    try {
      const job = await queue.getJob(jobId);
      if (job) {
        const state = await job.getState();
        // Only remove if not already completed
        if (state !== "completed" && state !== "failed") {
          await job.remove();
        }
        return;
      }
    } catch {
      // Job not in this queue, continue
      continue;
    }
  }
}
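A quick usage sketch of these helpers (the tag value and task identifier below are hypothetical; they only need to match whatever the enqueue side put into the job id or payload):

// Cancel any still-pending ingest jobs tagged with this user id.
const jobs = await getJobsByTags(["user_123"], "ingest");
for (const job of jobs) {
  if (!job.isCompleted) {
    await cancelJobById(job.id);
  }
}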
184  apps/webapp/app/bullmq/utils/worker-logger.ts  Normal file
@@ -0,0 +1,184 @@
/**
 * BullMQ Worker Logger
 *
 * Comprehensive logging utility for tracking worker status, queue metrics,
 * and job lifecycle events
 */

import { type Worker, type Queue } from "bullmq";
import { logger } from "~/services/logger.service";

interface WorkerMetrics {
  name: string;
  concurrency: number;
  activeJobs: number;
  waitingJobs: number;
  delayedJobs: number;
  failedJobs: number;
  completedJobs: number;
}

/**
 * Setup comprehensive logging for a worker
 */
export function setupWorkerLogging(
  worker: Worker,
  queue: Queue,
  workerName: string,
): void {
  // Job picked up and started processing
  worker.on("active", async (job) => {
    const counts = await getQueueCounts(queue);
    logger.log(
      `[${workerName}] 🔄 Job started: ${job.id} | Queue: ${counts.waiting} waiting, ${counts.active} active, ${counts.delayed} delayed`,
    );
  });

  // Job completed successfully
  worker.on("completed", async (job, result) => {
    const counts = await getQueueCounts(queue);
    const duration = job.finishedOn ? job.finishedOn - job.processedOn! : 0;
    logger.log(
      `[${workerName}] ✅ Job completed: ${job.id} (${duration}ms) | Queue: ${counts.waiting} waiting, ${counts.active} active`,
    );
  });

  // Job failed
  worker.on("failed", async (job, error) => {
    const counts = await getQueueCounts(queue);
    const attempt = job?.attemptsMade || 0;
    const maxAttempts = job?.opts?.attempts || 3;
    logger.error(
      `[${workerName}] ❌ Job failed: ${job?.id} (attempt ${attempt}/${maxAttempts}) | Error: ${error.message} | Queue: ${counts.waiting} waiting, ${counts.failed} failed`,
    );
  });

  // Job progress update (if job reports progress)
  worker.on("progress", async (job, progress) => {
    logger.log(`[${workerName}] 📊 Job progress: ${job.id} - ${progress}%`);
  });

  // Worker stalled (job took too long)
  worker.on("stalled", async (jobId) => {
    logger.warn(`[${workerName}] ⚠️ Job stalled: ${jobId}`);
  });

  // Worker error
  worker.on("error", (error) => {
    logger.error(`[${workerName}] 🔥 Worker error: ${error.message}`);
  });

  // Worker closed
  worker.on("closed", () => {
    logger.log(`[${workerName}] 🛑 Worker closed`);
  });
}

/**
 * Get queue counts for logging
 */
async function getQueueCounts(queue: Queue): Promise<{
  waiting: number;
  active: number;
  delayed: number;
  failed: number;
  completed: number;
}> {
  try {
    const counts = await queue.getJobCounts(
      "waiting",
      "active",
      "delayed",
      "failed",
      "completed",
    );
    return {
      waiting: counts.waiting || 0,
      active: counts.active || 0,
      delayed: counts.delayed || 0,
      failed: counts.failed || 0,
      completed: counts.completed || 0,
    };
  } catch (error) {
    return { waiting: 0, active: 0, delayed: 0, failed: 0, completed: 0 };
  }
}

/**
 * Get metrics for all workers
 */
export async function getAllWorkerMetrics(
  workers: Array<{ worker: Worker; queue: Queue; name: string }>,
): Promise<WorkerMetrics[]> {
  const metrics = await Promise.all(
    workers.map(async ({ worker, queue, name }) => {
      const counts = await getQueueCounts(queue);
      return {
        name,
        concurrency: worker.opts.concurrency || 1,
        activeJobs: counts.active,
        waitingJobs: counts.waiting,
        delayedJobs: counts.delayed,
        failedJobs: counts.failed,
        completedJobs: counts.completed,
      };
    }),
  );

  return metrics;
}

/**
 * Log worker metrics summary
 */
export function logWorkerMetrics(metrics: WorkerMetrics[]): void {
  logger.log("\n📊 BullMQ Worker Metrics:");
  logger.log("─".repeat(80));

  for (const metric of metrics) {
    logger.log(
      `[${metric.name.padEnd(25)}] Concurrency: ${metric.concurrency} | ` +
        `Active: ${metric.activeJobs} | Waiting: ${metric.waitingJobs} | ` +
        `Delayed: ${metric.delayedJobs} | Failed: ${metric.failedJobs} | ` +
        `Completed: ${metric.completedJobs}`,
    );
  }

  const totals = metrics.reduce(
    (acc, m) => ({
      active: acc.active + m.activeJobs,
      waiting: acc.waiting + m.waitingJobs,
      delayed: acc.delayed + m.delayedJobs,
      failed: acc.failed + m.failedJobs,
      completed: acc.completed + m.completedJobs,
    }),
    { active: 0, waiting: 0, delayed: 0, failed: 0, completed: 0 },
  );

  logger.log("─".repeat(80));
  logger.log(
    `[TOTAL] Active: ${totals.active} | Waiting: ${totals.waiting} | ` +
      `Delayed: ${totals.delayed} | Failed: ${totals.failed} | ` +
      `Completed: ${totals.completed}`,
  );
  logger.log("─".repeat(80) + "\n");
}

/**
 * Start periodic metrics logging
 */
export function startPeriodicMetricsLogging(
  workers: Array<{ worker: Worker; queue: Queue; name: string }>,
  intervalMs: number = 60000, // Default: 1 minute
): NodeJS.Timeout {
  const logMetrics = async () => {
    const metrics = await getAllWorkerMetrics(workers);
    logWorkerMetrics(metrics);
  };

  // Log immediately on start
  logMetrics();

  // Then log periodically
  return setInterval(logMetrics, intervalMs);
}
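A minimal sketch of wiring these utilities to a worker (the queue name and Redis connection are placeholders; the real wiring happens in the worker init code elsewhere in this change):

import { Queue, Worker } from "bullmq";
import { setupWorkerLogging, startPeriodicMetricsLogging } from "./worker-logger";

const connection = { host: "localhost", port: 6379 }; // placeholder connection
const exampleQueue = new Queue("example-queue", { connection });
const exampleWorker = new Worker("example-queue", async (job) => job.data, { connection });

// Attach lifecycle logging, then log queue metrics once a minute.
setupWorkerLogging(exampleWorker, exampleQueue, "example");
const metricsInterval = startPeriodicMetricsLogging(
  [{ worker: exampleWorker, queue: exampleQueue, name: "example" }],
  60000,
);
// clearInterval(metricsInterval) and exampleWorker.close() belong in the shutdown path.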
200  apps/webapp/app/bullmq/workers/index.ts  Normal file
@@ -0,0 +1,200 @@
/**
 * BullMQ Workers
 *
 * All worker definitions for processing background jobs with BullMQ
 */

import { Worker } from "bullmq";
import { getRedisConnection } from "../connection";
import {
  processEpisodeIngestion,
  type IngestEpisodePayload,
} from "~/jobs/ingest/ingest-episode.logic";
import {
  processDocumentIngestion,
  type IngestDocumentPayload,
} from "~/jobs/ingest/ingest-document.logic";
import {
  processConversationTitleCreation,
  type CreateConversationTitlePayload,
} from "~/jobs/conversation/create-title.logic";
import {
  processSessionCompaction,
  type SessionCompactionPayload,
} from "~/jobs/session/session-compaction.logic";
import {
  processTopicAnalysis,
  type TopicAnalysisPayload,
} from "~/jobs/bert/topic-analysis.logic";

import {
  enqueueIngestEpisode,
  enqueueSpaceAssignment,
  enqueueSessionCompaction,
  enqueueBertTopicAnalysis,
  enqueueSpaceSummary,
} from "~/lib/queue-adapter.server";
import { logger } from "~/services/logger.service";
import {
  processSpaceAssignment,
  type SpaceAssignmentPayload,
} from "~/jobs/spaces/space-assignment.logic";
import {
  processSpaceSummary,
  type SpaceSummaryPayload,
} from "~/jobs/spaces/space-summary.logic";

/**
 * Episode ingestion worker
 * Processes individual episode ingestion jobs with global concurrency
 *
 * Note: BullMQ uses a single global concurrency limit here.
 * Trigger.dev uses per-user concurrency via concurrencyKey.
 * For most open-source deployments, global concurrency is sufficient.
 */
export const ingestWorker = new Worker(
  "ingest-queue",
  async (job) => {
    const payload = job.data as IngestEpisodePayload;

    return await processEpisodeIngestion(
      payload,
      // Callbacks to enqueue follow-up jobs
      enqueueSpaceAssignment,
      enqueueSessionCompaction,
      enqueueBertTopicAnalysis,
    );
  },
  {
    connection: getRedisConnection(),
    concurrency: 1, // Global limit: process one job at a time
  },
);

/**
 * Document ingestion worker
 * Handles document-level ingestion with differential processing
 *
 * Note: Per-user concurrency is achieved by using userId as part of the jobId
 * when adding jobs to the queue
 */
export const documentIngestWorker = new Worker(
  "document-ingest-queue",
  async (job) => {
    const payload = job.data as IngestDocumentPayload;
    return await processDocumentIngestion(
      payload,
      // Callback to enqueue episode ingestion for each chunk
      enqueueIngestEpisode,
    );
  },
  {
    connection: getRedisConnection(),
    concurrency: 3, // Process up to 3 documents in parallel
  },
);

/**
 * Conversation title creation worker
 */
export const conversationTitleWorker = new Worker(
  "conversation-title-queue",
  async (job) => {
    const payload = job.data as CreateConversationTitlePayload;
    return await processConversationTitleCreation(payload);
  },
  {
    connection: getRedisConnection(),
    concurrency: 10, // Process up to 10 title creations in parallel
  },
);

/**
 * Session compaction worker
 */
export const sessionCompactionWorker = new Worker(
  "session-compaction-queue",
  async (job) => {
    const payload = job.data as SessionCompactionPayload;
    return await processSessionCompaction(payload);
  },
  {
    connection: getRedisConnection(),
    concurrency: 3, // Process up to 3 compactions in parallel
  },
);

/**
 * BERT topic analysis worker
 * Handles CPU-intensive topic modeling
 */
export const bertTopicWorker = new Worker(
  "bert-topic-queue",
  async (job) => {
    const payload = job.data as TopicAnalysisPayload;
    return await processTopicAnalysis(
      payload,
      // Callback to enqueue space summary
      enqueueSpaceSummary,
    );
  },
  {
    connection: getRedisConnection(),
    concurrency: 2, // Process up to 2 analyses in parallel (CPU-intensive)
  },
);

/**
 * Space assignment worker
 * Handles assigning episodes to spaces based on semantic matching
 *
 * Note: Global concurrency of 1 ensures sequential processing.
 * Trigger.dev uses per-user concurrency via concurrencyKey.
 */
export const spaceAssignmentWorker = new Worker(
  "space-assignment-queue",
  async (job) => {
    const payload = job.data as SpaceAssignmentPayload;
    return await processSpaceAssignment(
      payload,
      // Callback to enqueue space summary
      enqueueSpaceSummary,
    );
  },
  {
    connection: getRedisConnection(),
    concurrency: 1, // Global limit: process one job at a time
  },
);

/**
 * Space summary worker
 * Handles generating summaries for spaces
 */
export const spaceSummaryWorker = new Worker(
  "space-summary-queue",
  async (job) => {
    const payload = job.data as SpaceSummaryPayload;
    return await processSpaceSummary(payload);
  },
  {
    connection: getRedisConnection(),
    concurrency: 1, // Process one space summary at a time
  },
);

/**
 * Graceful shutdown handler
 */
export async function closeAllWorkers(): Promise<void> {
  await Promise.all([
    ingestWorker.close(),
    documentIngestWorker.close(),
    conversationTitleWorker.close(),
    sessionCompactionWorker.close(),
    bertTopicWorker.close(),
    spaceSummaryWorker.close(),
    spaceAssignmentWorker.close(),
  ]);
  logger.log("All BullMQ workers closed");
}
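The per-user concurrency note on the document ingest worker relies on BullMQ's jobId option on the producer side. A hedged sketch of what that enqueue call can look like (payload fields and the id scheme are illustrative; the real helpers live in ~/lib/queue-adapter.server and may differ):

import { Queue } from "bullmq";

const documentIngestQueue = new Queue("document-ingest-queue", {
  connection: { host: "localhost", port: 6379 }, // placeholder connection
});

// Embedding the userId in the jobId keeps one user's documents identifiable
// and lets duplicate submissions of the same document collapse into one job.
await documentIngestQueue.add(
  "ingest-document",
  { userId: "user_123", workspaceId: "ws_1", documentId: "doc_42" },
  { jobId: "user_123:doc_42", attempts: 3 },
);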
@@ -1,38 +1,42 @@
 import { EditorContent, useEditor } from "@tiptap/react";
 
-import { useEffect } from "react";
+import { useEffect, memo } from "react";
-import { UserTypeEnum } from "@core/types";
-import { type ConversationHistory } from "@core/database";
 import { cn } from "~/lib/utils";
 import { extensionsForConversation } from "./editor-extensions";
 import { skillExtension } from "../editor/skill-extension";
+import { type UIMessage } from "ai";
 
 interface AIConversationItemProps {
-  conversationHistory: ConversationHistory;
+  message: UIMessage;
 }
 
-export const ConversationItem = ({
-  conversationHistory,
-}: AIConversationItemProps) => {
-  const isUser =
-    conversationHistory.userType === UserTypeEnum.User ||
-    conversationHistory.userType === UserTypeEnum.System;
-
-  const id = `a${conversationHistory.id.replace(/-/g, "")}`;
+function getMessage(message: string) {
+  let finalMessage = message.replace("<final_response>", "");
+  finalMessage = finalMessage.replace("</final_response>", "");
+  finalMessage = finalMessage.replace("<question_response>", "");
+  finalMessage = finalMessage.replace("</question_response>", "");
+
+  return finalMessage;
+}
+
+const ConversationItemComponent = ({ message }: AIConversationItemProps) => {
+  const isUser = message.role === "user" || false;
+  const textPart = message.parts.find((part) => part.type === "text");
 
   const editor = useEditor({
     extensions: [...extensionsForConversation, skillExtension],
     editable: false,
-    content: conversationHistory.message,
+    content: textPart ? getMessage(textPart.text) : "",
   });
 
   useEffect(() => {
-    editor?.commands.setContent(conversationHistory.message);
+    if (textPart) {
+      editor?.commands.setContent(getMessage(textPart.text));
+    }
     // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [id, conversationHistory.message]);
+  }, [message]);
 
-  if (!conversationHistory.message) {
+  if (!message) {
     return null;
   }
 
@@ -49,3 +53,12 @@ export const ConversationItem = ({
     </div>
   );
 };
+
+// Memoize to prevent unnecessary re-renders
+export const ConversationItem = memo(
+  ConversationItemComponent,
+  (prevProps, nextProps) => {
+    // Only re-render if the conversation history ID or message changed
+    return prevProps.message === nextProps.message;
+  },
+);
@@ -5,28 +5,30 @@ import { Paragraph } from "@tiptap/extension-paragraph";
 import { Text } from "@tiptap/extension-text";
 import { type Editor } from "@tiptap/react";
 import { EditorContent, Placeholder, EditorRoot } from "novel";
-import { useCallback, useState } from "react";
+import { useCallback, useState, useEffect } from "react";
 import { cn } from "~/lib/utils";
 import { Button } from "../ui";
 import { LoaderCircle } from "lucide-react";
-import { Form, useSubmit } from "@remix-run/react";
+import { Form, useSubmit, useActionData } from "@remix-run/react";
 
 interface ConversationTextareaProps {
   defaultValue?: string;
-  conversationId: string;
   placeholder?: string;
   isLoading?: boolean;
   className?: string;
   onChange?: (text: string) => void;
   disabled?: boolean;
+  onConversationCreated?: (message: string) => void;
+  stop?: () => void;
 }
 
 export function ConversationTextarea({
   defaultValue,
   isLoading = false,
   placeholder,
-  conversationId,
   onChange,
+  onConversationCreated,
+  stop,
 }: ConversationTextareaProps) {
   const [text, setText] = useState(defaultValue ?? "");
   const [editor, setEditor] = useState<Editor>();
@@ -42,131 +44,99 @@ export function ConversationTextarea({
       return;
     }
 
-    const data = isLoading ? {} : { message: text, conversationId };
-
-    submit(data as any, {
-      action: isLoading
-        ? `/home/conversation/${conversationId}`
-        : "/home/conversation",
-      method: "post",
-    });
+    onConversationCreated && onConversationCreated(text);
 
     editor?.commands.clearContent(true);
     setText("");
-
-    editor.commands.clearContent(true);
-    setText("");
   }, [editor, text]);
 
-  // Send message to API
-  const submitForm = useCallback(
-    async (e: React.FormEvent<HTMLFormElement>) => {
-      const data = isLoading
-        ? {}
-        : { message: text, title: text, conversationId };
-
-      submit(data as any, {
-        action: isLoading
-          ? `/home/conversation/${conversationId}`
-          : "/home/conversation",
-        method: "post",
-      });
-
-      editor?.commands.clearContent(true);
-      setText("");
-      e.preventDefault();
-    },
-    [text, conversationId],
-  );
-
   return (
-    <Form
-      action="/home/conversation"
-      method="post"
-      onSubmit={(e) => submitForm(e)}
-      className="pt-2"
-    >
     <div className="bg-background-3 rounded-lg border-1 border-gray-300 py-2">
       <EditorRoot>
         <EditorContent
           // eslint-disable-next-line @typescript-eslint/no-explicit-any
           initialContent={defaultValue as any}
           extensions={[
             Document,
             Paragraph,
             Text,
             HardBreak.configure({
               keepMarks: true,
             }),
 
             Placeholder.configure({
               placeholder: () => placeholder ?? "Ask sol...",
               includeChildren: true,
             }),
             History,
           ]}
           onCreate={async ({ editor }) => {
             setEditor(editor);
             await new Promise((resolve) => setTimeout(resolve, 100));
             editor.commands.focus("end");
           }}
           onUpdate={({ editor }) => {
             onUpdate(editor);
           }}
           shouldRerenderOnTransaction={false}
           editorProps={{
             attributes: {
               class: `prose prose-lg dark:prose-invert prose-headings:font-title font-default focus:outline-none max-w-full`,
             },
             handleKeyDown(view, event) {
               if (event.key === "Enter" && !event.shiftKey) {
                 // eslint-disable-next-line @typescript-eslint/no-explicit-any
                 const target = event.target as any;
                 if (target.innerHTML.includes("suggestion")) {
                   return false;
                 }
                 event.preventDefault();
                 if (text) {
                   handleSend();
                 }
                 return true;
               }
 
               if (event.key === "Enter" && event.shiftKey) {
                 view.dispatch(
                   view.state.tr.replaceSelectionWith(
                     view.state.schema.nodes.hardBreak.create(),
                   ),
                 );
                 return true;
               }
               return false;
             },
           }}
           immediatelyRender={false}
           className={cn(
             "editor-container text-md max-h-[400px] min-h-[40px] w-full min-w-full overflow-auto rounded-lg px-3",
           )}
         />
       </EditorRoot>
       <div className="mb-1 flex justify-end px-3">
         <Button
           variant="default"
           className="gap-1 shadow-none transition-all duration-500 ease-in-out"
-          type="submit"
+          onClick={() => {
+            if (!isLoading) {
+              handleSend();
+            } else {
+              stop && stop();
+            }
+          }}
           size="lg"
         >
           {isLoading ? (
             <>
               <LoaderCircle size={18} className="mr-1 animate-spin" />
               Stop
             </>
           ) : (
             <>Chat</>
           )}
         </Button>
       </div>
-      </div>
-    </Form>
+    </div>
   );
 }
@@ -17,7 +17,7 @@ export const StreamingConversation = ({
   afterStreaming,
   apiURL,
 }: StreamingConversationProps) => {
-  const { message, isEnd } = useTriggerStream(runId, token, apiURL);
+  const { message } = useTriggerStream(runId, token, apiURL, afterStreaming);
   const [loadingText, setLoadingText] = React.useState("Thinking...");
 
   const loadingMessages = [
@@ -48,13 +48,6 @@
     // eslint-disable-next-line react-hooks/exhaustive-deps
   }, [message]);
 
-  React.useEffect(() => {
-    if (isEnd) {
-      afterStreaming();
-    }
-    // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [isEnd]);
-
   React.useEffect(() => {
     let currentIndex = 0;
     let delay = 5000; // Start with 2 seconds for more thinking time
@@ -1,5 +1,5 @@
-import { useRealtimeRunWithStreams } from "@trigger.dev/react-hooks";
-import React from "react";
+import React, { useEffect, useState } from "react";
+import { EventSource, type ErrorEvent } from "eventsource";
 
 const getTriggerAPIURL = (apiURL?: string) => {
   return (
@@ -12,102 +12,53 @@ export const useTriggerStream = (
   runId: string,
   token: string,
   apiURL?: string,
+  afterStreaming?: (finalMessage: string) => void,
 ) => {
   // Need to fix this later
   const baseURL = React.useMemo(() => getTriggerAPIURL(apiURL), [apiURL]);
+  const [error, setError] = useState<ErrorEvent | null>(null);
+  const [message, setMessage] = useState("");
 
-  const { error, streams, run } = useRealtimeRunWithStreams(runId, {
-    accessToken: token,
-    baseURL, // Optional if you are using a self-hosted Trigger.dev instance
-  });
+  useEffect(() => {
+    startStreaming();
+  }, []);
 
-  const isEnd = React.useMemo(() => {
-    if (error) {
-      return true;
-    }
-
-    if (
-      run &&
-      [
-        "COMPLETED",
-        "CANCELED",
-        "FAILED",
-        "CRASHED",
-        "INTERRUPTED",
-        "SYSTEM_FAILURE",
-        "EXPIRED",
-        "TIMED_OUT",
-      ].includes(run?.status)
-    ) {
-      return true;
-    }
-
-    const hasStreamEnd =
-      streams.messages &&
-      streams.messages.filter((item) => {
-        // Check if the item has a type that includes 'MESSAGE_' and is not empty
-        return item.type?.includes("STREAM_END");
-      });
-
-    if (hasStreamEnd && hasStreamEnd.length > 0) {
-      return true;
-    }
-
-    return false;
-    // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [run?.status, error, streams.messages?.length]);
-
-  const message = React.useMemo(() => {
-    if (!streams?.messages) {
-      return "";
-    }
-
-    // Filter and combine all message chunks
-    return streams.messages
-      .filter((item) => {
-        // Check if the item has a type that includes 'MESSAGE_' and is not empty
-        return item.type?.includes("MESSAGE_");
-      })
-      .map((item) => item.message)
-      .join("");
-
-    // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [streams.messages?.length]);
-
-  // const actionMessages = React.useMemo(() => {
-  //   if (!streams?.messages) {
-  //     return {};
-  //   }
-
-  //   // eslint-disable-next-line @typescript-eslint/no-explicit-any
-  //   const messages: Record<string, { isStreaming: boolean; content: any[] }> =
-  //     {};
-
-  //   streams.messages.forEach((item) => {
-  //     if (item.type?.includes("SKILL_")) {
-  //       try {
-  //         const parsed = JSON.parse(item.message);
-  //         const skillId = parsed.skillId;
-
-  //         if (!messages[skillId]) {
-  //           messages[skillId] = { isStreaming: true, content: [] };
-  //         }
-
-  //         if (item.type === "SKILL_END") {
-  //           messages[skillId].isStreaming = false;
-  //         }
-
-  //         messages[skillId].content.push(parsed);
-  //       } catch (e) {
-  //         console.error("Failed to parse message:", e);
-  //       }
-  //     }
-  //   });
-
-  //   return messages;
-
-  //   // eslint-disable-next-line react-hooks/exhaustive-deps
-  // }, [streams.messages?.length]);
-
-  return { isEnd, message, actionMessages: [] };
+  const startStreaming = () => {
+    const eventSource = new EventSource(
+      `${baseURL}/realtime/v1/streams/${runId}/messages`,
+      {
+        fetch: (input, init) =>
+          fetch(input, {
+            ...init,
+            headers: {
+              ...init.headers,
+              Authorization: `Bearer ${token}`,
+            },
+          }),
+      },
+    );
+
+    eventSource.onmessage = (event) => {
+      try {
+        const eventData = JSON.parse(event.data);
+
+        if (eventData.type.includes("MESSAGE_")) {
+          setMessage((prevMessage) => prevMessage + eventData.message);
+        }
+      } catch (e) {
+        console.error("Failed to parse message:", e);
+      }
+    };
+
+    eventSource.onerror = (err) => {
+      console.error("EventSource failed:", err);
+      setError(err);
+      eventSource.close();
+      if (afterStreaming) {
+        afterStreaming(message);
+      }
+    };
+  };
+
+  return { error, message, actionMessages: [] };
 };
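For reference, a minimal consumer of the rewritten hook (the component and prop names here are illustrative, not part of this change):

const AnswerStream = ({ runId, token, apiURL }: { runId: string; token: string; apiURL?: string }) => {
  // message grows as MESSAGE_ events arrive; the callback fires when the stream closes.
  const { message, error } = useTriggerStream(runId, token, apiURL, (finalMessage) =>
    console.log("stream closed with:", finalMessage),
  );

  if (error) {
    return <span>Stream failed</span>;
  }
  return <span>{message || "Thinking..."}</span>;
};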
@@ -2,25 +2,20 @@ import { NodeViewWrapper } from "@tiptap/react";
 
 import React from "react";
 
-import { getIcon as iconUtil, type IconType } from "../../icon-utils";
+import StaticLogo from "~/components/logo/logo";
 
-import { ChevronDown, ChevronRight } from "lucide-react";
-
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 export const SkillComponent = (props: any) => {
   const id = props.node.attrs.id;
   const name = props.node.attrs.name;
   const agent = props.node.attrs.agent;
-  const [open, setOpen] = React.useState(false);
 
   if (id === "undefined" || id === undefined || !name) {
     return null;
   }
 
   const getIcon = () => {
-    const Icon = iconUtil(agent as IconType);
-
-    return <Icon size={18} className="rounded-sm" />;
+    return <StaticLogo size={18} className="rounded-sm" />;
   };
 
   const snakeToTitleCase = (input: string): string => {
@@ -46,7 +41,7 @@ export const SkillComponent = (props: any) => {
     <>
       <div className="bg-grayAlpha-100 text-sm-md mt-0.5 flex w-fit items-center gap-2 rounded p-2">
         {getIcon()}
-        <span className="font-mono text-sm">{snakeToTitleCase(name)}</span>
+        <span className="font-mono text-sm">{snakeToTitleCase(agent)}</span>
       </div>
     </>
   );
@@ -284,33 +284,37 @@ export const Graph = forwardRef<GraphRef, GraphProps>(
       // More nodes = need more space to prevent overcrowding
       let scalingRatio: number;
       if (nodeCount < 10) {
-        scalingRatio = 15; // Tight for small graphs
+        scalingRatio = 20; // Slightly wider for small graphs
       } else if (nodeCount < 50) {
-        scalingRatio = 20 + (nodeCount - 10) * 0.5; // Gradual increase
+        scalingRatio = 30 + (nodeCount - 10) * 1.0; // Faster increase
       } else if (nodeCount < 200) {
-        scalingRatio = 40 + (nodeCount - 50) * 0.2; // Slower increase
+        scalingRatio = 70 + (nodeCount - 50) * 0.5; // More spread
+      } else if (nodeCount < 500) {
+        scalingRatio = 145 + (nodeCount - 200) * 0.3; // Continue spreading
       } else {
-        scalingRatio = Math.min(80, 70 + (nodeCount - 200) * 0.05); // Cap at 80
+        scalingRatio = Math.min(300, 235 + (nodeCount - 500) * 0.1); // Cap at 300
       }
 
       // Calculate optimal gravity based on density and node count
       let gravity: number;
       if (density > 0.3) {
         // Dense graphs need less gravity to prevent overcrowding
-        gravity = 1 + density * 2;
+        gravity = 0.5 + density * 1.5;
       } else if (density > 0.1) {
         // Medium density graphs
-        gravity = 3 + density * 5;
+        gravity = 2 + density * 3;
       } else {
         // Sparse graphs need more gravity to keep components together
-        gravity = Math.min(8, 5 + (1 - density) * 3);
+        gravity = Math.min(6, 4 + (1 - density) * 2);
       }
 
-      // Adjust gravity based on node count
+      // Adjust gravity based on node count - more aggressive reduction for large graphs
       if (nodeCount < 20) {
         gravity *= 1.5; // Smaller graphs benefit from stronger gravity
       } else if (nodeCount > 100) {
-        gravity *= 0.8; // Larger graphs need gentler gravity
+        gravity *= 0.5; // Larger graphs need much gentler gravity
+      } else if (nodeCount > 200) {
+        gravity *= 0.3; // Very large graphs need very gentle gravity
       }
 
       // Calculate iterations based on complexity
@@ -374,10 +378,10 @@ export const Graph = forwardRef<GraphRef, GraphProps>(
         settings: {
           ...settings,
           barnesHutOptimize: true,
-          strongGravityMode: true,
+          strongGravityMode: false, // Disable strong gravity for more spread
           gravity: optimalParams.gravity,
           scalingRatio: optimalParams.scalingRatio,
-          slowDown: 3,
+          slowDown: 1.5, // Reduced slowDown for better spreading
         },
       });
 
@@ -1,10 +1,4 @@
-import { EllipsisVertical, Trash, Copy } from "lucide-react";
-import {
-  DropdownMenu,
-  DropdownMenuContent,
-  DropdownMenuItem,
-  DropdownMenuTrigger,
-} from "../ui/dropdown-menu";
+import { Trash, Copy, RotateCw } from "lucide-react";
 import { Button } from "../ui/button";
 import {
   AlertDialog,
@@ -22,11 +16,13 @@ import { toast } from "~/hooks/use-toast";
 
 interface LogOptionsProps {
   id: string;
+  status?: string;
 }
 
-export const LogOptions = ({ id }: LogOptionsProps) => {
+export const LogOptions = ({ id, status }: LogOptionsProps) => {
   const [deleteDialogOpen, setDeleteDialogOpen] = useState(false);
   const deleteFetcher = useFetcher<{ success: boolean }>();
+  const retryFetcher = useFetcher<{ success: boolean }>();
   const navigate = useNavigate();
 
   const handleDelete = () => {
@@ -58,22 +54,54 @@ export const LogOptions = ({ id }: LogOptionsProps) => {
     }
   };
 
+  const handleRetry = () => {
+    retryFetcher.submit(
+      {},
+      {
+        method: "POST",
+        action: `/api/v1/logs/${id}/retry`,
+      },
+    );
+  };
+
   useEffect(() => {
     if (deleteFetcher.state === "idle" && deleteFetcher.data?.success) {
       navigate(`/home/inbox`);
     }
   }, [deleteFetcher.state, deleteFetcher.data]);
 
+  useEffect(() => {
+    if (retryFetcher.state === "idle" && retryFetcher.data?.success) {
+      toast({
+        title: "Success",
+        description: "Episode retry initiated",
+      });
+      // Reload the page to reflect the new status
+      window.location.reload();
+    }
+  }, [retryFetcher.state, retryFetcher.data]);
+
   return (
     <>
       <div className="flex items-center gap-2">
+        {status === "FAILED" && (
+          <Button
+            variant="secondary"
+            size="sm"
+            className="gap-2 rounded"
+            onClick={handleRetry}
+            disabled={retryFetcher.state !== "idle"}
+          >
+            <RotateCw size={15} /> Retry
+          </Button>
+        )}
         <Button
           variant="secondary"
           size="sm"
           className="gap-2 rounded"
           onClick={handleCopy}
         >
-          <Copy size={15} /> Copy ID
+          <Copy size={15} /> Copy Id
         </Button>
         <Button
           variant="secondary"
@@ -74,7 +74,7 @@ export function LogTextCollapse({ text, log }: LogTextCollapseProps) {
     <div className={cn("flex w-full min-w-[0px] shrink flex-col")}>
       <div className="flex w-full items-center justify-between gap-4">
         <div className="inline-flex min-h-[24px] min-w-[0px] shrink items-center justify-start">
-          <div className={cn("truncate text-left")}>
+          <div className={cn("truncate text-left text-base")}>
             {text.replace(/<[^>]+>/g, "")}
           </div>
         </div>
@@ -97,7 +97,7 @@ export function LogTextCollapse({ text, log }: LogTextCollapseProps) {
       </div>
 
       <div className="flex items-center justify-between">
-        <div className="flex items-center gap-1">
+        <div className="flex items-center gap-1 font-light">
           {getIconForAuthorise(log.source.toLowerCase(), 12, undefined)}
           {log.source.toLowerCase()}
         </div>
4  apps/webapp/app/components/onboarding/index.ts  Normal file
@@ -0,0 +1,4 @@
export { OnboardingModal } from "./onboarding-modal";
export { Provider, OnboardingStep } from "./types";
export type { ProviderConfig, OnboardingState } from "./types";
export { PROVIDER_CONFIGS, SUGGESTED_INGESTION_PROMPTS, VERIFICATION_PROMPT } from "./provider-config";
137  apps/webapp/app/components/onboarding/ingestion-step.tsx  Normal file
@@ -0,0 +1,137 @@
import { useState } from "react";
import { Copy, Check, Loader2, AlertCircle } from "lucide-react";
import { Button } from "../ui";
import { SUGGESTED_INGESTION_PROMPTS } from "./provider-config";

interface IngestionStepProps {
  providerName: string;
  ingestionStatus: "idle" | "waiting" | "processing" | "complete" | "error";
  onStartWaiting: () => void;
  error?: string;
}

export function IngestionStep({
  providerName,
  ingestionStatus,
  onStartWaiting,
  error,
}: IngestionStepProps) {
  const [copiedIndex, setCopiedIndex] = useState<number | null>(null);

  const handleCopy = async (text: string, index: number) => {
    await navigator.clipboard.writeText(text);
    setCopiedIndex(index);
    setTimeout(() => setCopiedIndex(null), 2000);
  };

  return (
    <div className="space-y-6">
      <div>
        <h2 className="mb-2 text-xl font-semibold">
          Let's Store Your First Memory
        </h2>
        <p className="text-muted-foreground text-sm">
          Copy one of these prompts and paste it into {providerName} to create
          your first memory
        </p>
      </div>

      {ingestionStatus === "idle" && (
        <>
          <div className="space-y-3">
            {SUGGESTED_INGESTION_PROMPTS.map((prompt, index) => (
              <div
                key={index}
                className="group bg-grayAlpha-100 hover:border-primary/50 relative rounded-lg border border-gray-300 p-4 transition-colors"
              >
                <p className="pr-10 text-sm">{prompt}</p>
                <button
                  onClick={() => handleCopy(prompt, index)}
                  className="hover:bg-background absolute top-3 right-3 rounded-md p-2 transition-colors"
                  title="Copy to clipboard"
                >
                  {copiedIndex === index ? (
                    <Check className="h-4 w-4 text-green-500" />
                  ) : (
                    <Copy className="text-muted-foreground h-4 w-4" />
                  )}
                </button>
              </div>
            ))}
          </div>

          <div className="flex items-center justify-between rounded-lg border border-blue-500/20 bg-blue-500/10 p-4">
            <div className="flex items-start gap-3">
              <AlertCircle className="mt-0.5 h-5 w-5 text-blue-500" />
              <div className="text-sm">
                <p className="font-medium text-blue-700 dark:text-blue-300">
                  Important
                </p>
                <p className="text-blue-600 dark:text-blue-400">
                  After pasting the prompt in {providerName}, click the button
                  below to wait for ingestion
                </p>
              </div>
            </div>
          </div>

          <div className="flex justify-end">
            <Button onClick={onStartWaiting} size="lg">
              I've Sent the Prompt
            </Button>
          </div>
        </>
      )}

      {(ingestionStatus === "waiting" || ingestionStatus === "processing") && (
        <div className="flex flex-col items-center justify-center space-y-4 py-12">
          <Loader2 className="text-primary h-12 w-12 animate-spin" />
          <div className="space-y-2 text-center">
            <h3 className="text-lg font-medium">
              {ingestionStatus === "waiting"
                ? "Waiting for your first ingestion..."
                : "Processing your memory..."}
            </h3>
            <p className="text-muted-foreground max-w-md text-sm">
              {ingestionStatus === "waiting"
                ? "Make sure you've sent the prompt in your provider app. We're listening for the first memory ingestion."
                : "We're storing your information. This usually takes a few seconds."}
            </p>
          </div>
        </div>
      )}

      {ingestionStatus === "complete" && (
        <div className="flex flex-col items-center justify-center space-y-4 py-12">
          <div className="flex h-16 w-16 items-center justify-center rounded-full bg-green-500/10">
            <Check className="h-8 w-8 text-green-500" />
          </div>
          <div className="space-y-2 text-center">
            <h3 className="text-lg font-medium">Memory stored successfully!</h3>
            <p className="text-muted-foreground text-sm">
              Your first memory has been ingested. Let's verify it worked.
            </p>
          </div>
        </div>
      )}

      {ingestionStatus === "error" && (
        <div className="flex flex-col items-center justify-center space-y-4 py-12">
          <div className="flex h-16 w-16 items-center justify-center rounded-full bg-red-500/10">
            <AlertCircle className="h-8 w-8 text-red-500" />
          </div>
          <div className="space-y-2 text-center">
            <h3 className="text-lg font-medium">Something went wrong</h3>
            <p className="text-muted-foreground max-w-md text-sm">
              {error ||
                "We couldn't detect your memory ingestion. Please try again or check your provider connection."}
            </p>
          </div>
          <Button onClick={onStartWaiting} variant="secondary">
            Try Again
          </Button>
        </div>
      )}
    </div>
  );
}
230  apps/webapp/app/components/onboarding/onboarding-modal.tsx  Normal file
@@ -0,0 +1,230 @@
import { useState } from "react";
import { Dialog, DialogContent, DialogHeader, DialogTitle } from "../ui/dialog";
import { type Provider, OnboardingStep } from "./types";
import { ProviderSelectionStep } from "./provider-selection-step";
import { IngestionStep } from "./ingestion-step";
import { VerificationStep } from "./verification-step";
import { PROVIDER_CONFIGS } from "./provider-config";
import { Progress } from "../ui/progress";

interface OnboardingModalProps {
  isOpen: boolean;
  onClose: () => void;
  onComplete: () => void;
}

export function OnboardingModal({
  isOpen,
  onClose,
  onComplete,
}: OnboardingModalProps) {
  const [currentStep, setCurrentStep] = useState<OnboardingStep>(
    OnboardingStep.PROVIDER_SELECTION,
  );
  const [selectedProvider, setSelectedProvider] = useState<Provider>();
  const [ingestionStatus, setIngestionStatus] = useState<
    "idle" | "waiting" | "processing" | "complete" | "error"
  >("idle");
  const [verificationResult, setVerificationResult] = useState<string>();
  const [isCheckingRecall, setIsCheckingRecall] = useState(false);
  const [error, setError] = useState<string>();

  // Calculate progress
  const getProgress = () => {
    switch (currentStep) {
      case OnboardingStep.PROVIDER_SELECTION:
        return 33;
      case OnboardingStep.FIRST_INGESTION:
        return 66;
      case OnboardingStep.VERIFICATION:
        return 100;
      default:
        return 0;
    }
  };

  // Poll for ingestion status
  const pollIngestion = async () => {
    setIngestionStatus("waiting");

    try {
      const maxAttempts = 30; // 60 seconds (30 * 2s)
      let attempts = 0;

      // Store the timestamp when polling starts
      const startTime = Date.now();

      const poll = async (): Promise<boolean> => {
        if (attempts >= maxAttempts) {
          throw new Error("Ingestion timeout - please try again");
        }

        // Check for new ingestion logs from the last 5 minutes
        const response = await fetch("/api/v1/logs?limit=1");
        const data = await response.json();

        // Check if there's a recent ingestion (created after we started polling)
        if (data.logs && data.logs.length > 0) {
          const latestLog = data.logs[0];
          const logTime = new Date(latestLog.time).getTime();

          // If the log was created after we started polling, we found a new ingestion
          if (logTime >= startTime) {
            return true;
          }
        }

        await new Promise((resolve) => setTimeout(resolve, 2000));
        attempts++;

        return poll();
      };

      const success = await poll();

      if (success) {
        setIngestionStatus("complete");
        // Auto-advance to verification step after 2 seconds
        setTimeout(() => {
          setCurrentStep(OnboardingStep.VERIFICATION);
        }, 2000);
      }
    } catch (err) {
      setError(err instanceof Error ? err.message : "Unknown error occurred");
      setIngestionStatus("error");
    }
  };

  const handleProviderSelect = (provider: Provider) => {
    setSelectedProvider(provider);
  };

  const handleContinueFromProvider = () => {
    setCurrentStep(OnboardingStep.FIRST_INGESTION);
  };

  const handleStartWaiting = () => {
    pollIngestion();
  };

  const handleComplete = () => {
    setCurrentStep(OnboardingStep.COMPLETE);
    onComplete();
    onClose();
  };

  // Poll for recall logs to detect verification
  const pollRecallLogs = async () => {
    setIsCheckingRecall(true);

    try {
      const maxAttempts = 30; // 60 seconds
      let attempts = 0;
      const startTime = Date.now();

      const poll = async (): Promise<string | null> => {
        if (attempts >= maxAttempts) {
          throw new Error("Verification timeout - please try again");
        }

        // Check for new recall logs
        const response = await fetch("/api/v1/recall-logs?limit=1");
        const data = await response.json();

        // Check if there's a recent recall (created after we started polling)
        if (data.recallLogs && data.recallLogs.length > 0) {
          const latestRecall = data.recallLogs[0];
          const recallTime = new Date(latestRecall.createdAt).getTime();

          // If the recall was created after we started polling
          if (recallTime >= startTime) {
            // Return the query as verification result
            return latestRecall.query || "Recall detected successfully";
          }
        }

        await new Promise((resolve) => setTimeout(resolve, 2000));
        attempts++;

        return poll();
      };

      const result = await poll();

      if (result) {
        setVerificationResult(result);
        setIsCheckingRecall(false);
      }
    } catch (err) {
      setError(err instanceof Error ? err.message : "Unknown error occurred");
      setIsCheckingRecall(false);
    }
  };

  const getStepTitle = () => {
    switch (currentStep) {
      case OnboardingStep.PROVIDER_SELECTION:
        return "Step 1 of 3";
      case OnboardingStep.FIRST_INGESTION:
        return "Step 2 of 3";
      case OnboardingStep.VERIFICATION:
        return "Step 3 of 3";
      default:
        return "";
    }
  };

  return (
    <Dialog open={isOpen} onOpenChange={onClose}>
      <DialogContent className="max-h-[90vh] max-w-3xl overflow-y-auto p-4">
        <DialogHeader>
          <div className="space-y-3">
            <DialogTitle className="text-2xl">Welcome to Core</DialogTitle>
            <div className="space-y-2">
              <div className="flex items-center justify-between">
                <p className="text-muted-foreground text-sm">
                  {getStepTitle()}
                </p>
              </div>
              <Progress
                segments={[{ value: getProgress() }]}
                className="mb-2"
                color="#c15e50"
              />
            </div>
          </div>
        </DialogHeader>

        <div>
          {currentStep === OnboardingStep.PROVIDER_SELECTION && (
            <ProviderSelectionStep
              selectedProvider={selectedProvider}
              onSelectProvider={handleProviderSelect}
              onContinue={handleContinueFromProvider}
            />
          )}

          {currentStep === OnboardingStep.FIRST_INGESTION &&
            selectedProvider && (
              <IngestionStep
                providerName={PROVIDER_CONFIGS[selectedProvider].name}
                ingestionStatus={ingestionStatus}
                onStartWaiting={handleStartWaiting}
                error={error}
              />
            )}

          {currentStep === OnboardingStep.VERIFICATION && selectedProvider && (
            <VerificationStep
              providerName={PROVIDER_CONFIGS[selectedProvider].name}
              verificationResult={verificationResult}
              isCheckingRecall={isCheckingRecall}
              onStartChecking={pollRecallLogs}
              onComplete={handleComplete}
            />
          )}
        </div>
      </DialogContent>
    </Dialog>
|
||||||
|
);
|
||||||
|
}
|
||||||
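Both pollIngestion and pollRecallLogs above follow the same poll-until-timeout shape. A minimal standalone sketch of that pattern, for reference only (the pollUntil helper and its parameters are illustrative, not part of this diff):

// Minimal sketch: retry `check` every `intervalMs` until it returns a value,
// or give up after `maxAttempts` (mirrors the recursive poll() helpers above).
async function pollUntil<T>(
  check: () => Promise<T | null>,
  maxAttempts = 30,
  intervalMs = 2000,
): Promise<T> {
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    const result = await check();
    if (result !== null) {
      return result;
    }
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  throw new Error("Polling timed out - please try again");
}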
apps/webapp/app/components/onboarding/provider-config.ts (Normal file, 54 lines)
@@ -0,0 +1,54 @@
import { Provider, type ProviderConfig } from "./types";

export const PROVIDER_CONFIGS: Record<Provider, ProviderConfig> = {
  [Provider.CLAUDE_CODE]: {
    id: Provider.CLAUDE_CODE,
    name: "Claude Code CLI",
    description: "Connect your Claude Code CLI to CORE's memory system",
    docsUrl: "https://docs.heysol.ai/providers/claude-code",
    icon: "claude",
  },
  [Provider.CLAUDE]: {
    id: Provider.CLAUDE,
    name: "Claude",
    description: "Connect your Claude Desktop app to CORE's memory system",
    docsUrl: "https://docs.heysol.ai/providers/claude",
    icon: "claude",
  },
  [Provider.CURSOR]: {
    id: Provider.CURSOR,
    name: "Cursor",
    description: "Connect your Cursor Desktop app to CORE's memory system",
    docsUrl: "https://docs.heysol.ai/providers/cursor",
    icon: "cursor",
  },
  [Provider.KILO_CODE]: {
    id: Provider.KILO_CODE,
    name: "Kilo-Code",
    description: "Connect Kilo Code Agent to CORE's memory system via MCP",
    docsUrl: "https://docs.heysol.ai/providers/kilo-code",
    icon: "kilo-code",
  },
  [Provider.VSCODE]: {
    id: Provider.VSCODE,
    name: "VS Code (Github Copilot)",
    description: "Connect your VS Code editor to CORE's memory system via MCP",
    docsUrl: "https://docs.heysol.ai/providers/vscode",
    icon: "vscode",
  },
  [Provider.ZED]: {
    id: Provider.ZED,
    name: "Zed",
    description: "Connect your Zed editor to CORE's memory system via MCP",
    docsUrl: "https://docs.heysol.ai/providers/zed",
    icon: "zed",
  },
};

export const SUGGESTED_INGESTION_PROMPTS = [
  "I'm a full-stack developer working on a React and Node.js application. I prefer TypeScript, functional programming patterns, and writing comprehensive tests.",
  "I'm working on a machine learning project using Python and PyTorch. I focus on computer vision and prefer Jupyter notebooks for exploration.",
  "I'm a DevOps engineer managing Kubernetes clusters. I work primarily with Terraform, Helm, and CI/CD pipelines using GitHub Actions.",
];

export const VERIFICATION_PROMPT = "Who am I? Tell me what you know about me.";
@@ -0,0 +1,89 @@
import { Check, ExternalLink } from "lucide-react";
import { Button } from "../ui";
import { PROVIDER_CONFIGS } from "./provider-config";
import { type Provider } from "./types";
import { getIconForAuthorise } from "../icon-utils";

interface ProviderSelectionStepProps {
  selectedProvider?: Provider;
  onSelectProvider: (provider: Provider) => void;
  onContinue: () => void;
}

export function ProviderSelectionStep({
  selectedProvider,
  onSelectProvider,
  onContinue,
}: ProviderSelectionStepProps) {
  const providers = Object.values(PROVIDER_CONFIGS);

  return (
    <div className="space-y-2">
      <div>
        <h2 className="mb-2 text-xl font-semibold">Choose Your Provider</h2>
        <p className="text-muted-foreground text-sm">
          Select the application you'll use to connect with Core
        </p>
      </div>

      <div className="grid grid-cols-1 gap-3 sm:grid-cols-2 lg:grid-cols-3">
        {providers.map((provider) => {
          const isSelected = selectedProvider === provider.id;
          return (
            <Button
              key={provider.id}
              variant="outline"
              onClick={() => onSelectProvider(provider.id)}
              size="2xl"
              className={`relative flex flex-col items-start justify-center gap-1 rounded-lg border-1 border-gray-300 p-4 text-left transition-all ${
                isSelected
                  ? "border-primary bg-primary/5"
                  : "hover:border-primary/50 border-gray-300"
              }`}
            >
              <div className="flex h-full items-center gap-2">
                {getIconForAuthorise(provider.icon, 20)}
                <div className="flex items-center gap-2">
                  <h3 className="font-medium">{provider.name}</h3>
                </div>
              </div>
            </Button>
          );
        })}
      </div>

      {selectedProvider && (
        <div className="bg-grayAlpha-100 space-y-4 rounded-lg p-4">
          <div className="space-y-3">
            <h3 className="font-medium">Next Steps</h3>
            <p className="text-muted-foreground text-sm">
              Follow our setup guide to connect{" "}
              {PROVIDER_CONFIGS[selectedProvider].name} with Core. Once you've
              completed the setup, come back here to continue.
            </p>
            <a
              href={PROVIDER_CONFIGS[selectedProvider].docsUrl}
              target="_blank"
              rel="noopener noreferrer"
              className="bg-primary text-primary-foreground hover:bg-primary/90 inline-flex items-center gap-2 rounded-md px-4 py-2 text-sm font-medium transition-colors"
            >
              Open Setup Guide
              <ExternalLink className="h-4 w-4" />
            </a>
          </div>
        </div>
      )}

      <div className="flex justify-end">
        <Button
          onClick={onContinue}
          disabled={!selectedProvider}
          size="lg"
          variant="secondary"
        >
          Continue to Setup
        </Button>
      </div>
    </div>
  );
}
apps/webapp/app/components/onboarding/types.ts (Normal file, 32 lines)
@@ -0,0 +1,32 @@
export enum Provider {
  CLAUDE_CODE = "claude-code",
  CLAUDE = "claude",
  CURSOR = "cursor",
  KILO_CODE = "kilo-code",
  VSCODE = "vscode",
  ZED = "zed",
}

export enum OnboardingStep {
  PROVIDER_SELECTION = "provider_selection",
  FIRST_INGESTION = "first_ingestion",
  VERIFICATION = "verification",
  COMPLETE = "complete",
}

export interface ProviderConfig {
  id: Provider;
  name: string;
  description: string;
  docsUrl: string;
  icon: string;
}

export interface OnboardingState {
  currentStep: OnboardingStep;
  selectedProvider?: Provider;
  isConnected: boolean;
  ingestionStatus: "idle" | "waiting" | "processing" | "complete" | "error";
  verificationResult?: string;
  error?: string;
}
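For reference, a starting value that satisfies the OnboardingState interface above (the variable name is illustrative, not part of this diff):

const initialOnboardingState: OnboardingState = {
  currentStep: OnboardingStep.PROVIDER_SELECTION,
  isConnected: false,
  ingestionStatus: "idle",
  // selectedProvider, verificationResult and error stay undefined until set
};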
apps/webapp/app/components/onboarding/verification-step.tsx (Normal file, 101 lines)
@@ -0,0 +1,101 @@
import { useState } from "react";
import {
  Copy,
  Check,
  AlertCircle,
  ThumbsUp,
  ThumbsDown,
  Loader2,
} from "lucide-react";
import { Button } from "../ui";
import { VERIFICATION_PROMPT } from "./provider-config";

interface VerificationStepProps {
  providerName: string;
  verificationResult?: string;
  isCheckingRecall?: boolean;
  onStartChecking: () => void;
  onComplete: () => void;
}

export function VerificationStep({
  providerName,
  verificationResult,
  isCheckingRecall = false,
  onStartChecking,
  onComplete,
}: VerificationStepProps) {
  const [copied, setCopied] = useState(false);

  const handleCopy = async () => {
    await navigator.clipboard.writeText(VERIFICATION_PROMPT);
    setCopied(true);
    setTimeout(() => setCopied(false), 2000);
  };

  return (
    <div className="space-y-6">
      <div>
        <h2 className="mb-2 text-xl font-semibold">Verify Your Memory</h2>
        <p className="text-muted-foreground text-sm">
          Let's test if your memory is working correctly by asking the AI about
          you
        </p>
      </div>

      {!verificationResult && !isCheckingRecall && (
        <>
          <div className="group bg-grayAlpha-100 relative rounded-lg border border-gray-300 p-4">
            <p className="mb-1 text-sm font-medium">Copy this prompt:</p>
            <p className="pr-10 text-sm">{VERIFICATION_PROMPT}</p>
            <button
              onClick={handleCopy}
              className="hover:bg-background absolute top-3 right-3 rounded-md p-2 transition-colors"
              title="Copy to clipboard"
            >
              {copied ? (
                <Check className="h-4 w-4 text-green-500" />
              ) : (
                <Copy className="text-muted-foreground h-4 w-4" />
              )}
            </button>
          </div>

          <div className="flex items-center gap-3 rounded-lg border border-blue-500/20 bg-blue-500/10 p-4">
            <AlertCircle className="h-5 w-5 shrink-0 text-blue-500" />
            <div className="flex-1 text-sm">
              <p className="text-blue-600 dark:text-blue-400">
                Paste this prompt in {providerName}. Once you ask, click the
                button below to detect the recall.
              </p>
            </div>
          </div>

          <div className="flex justify-end gap-3">
            <Button onClick={onComplete} variant="ghost" size="lg">
              Skip Verification
            </Button>
            <Button onClick={onStartChecking} size="lg" variant="secondary">
              I've Asked the Question
            </Button>
          </div>
        </>
      )}

      {isCheckingRecall && !verificationResult && (
        <div className="flex flex-col items-center justify-center space-y-4 py-12">
          <Loader2 className="text-primary h-12 w-12 animate-spin" />
          <div className="space-y-2 text-center">
            <h3 className="text-lg font-medium">
              Waiting for your recall query...
            </h3>
            <p className="text-muted-foreground max-w-md text-sm">
              Make sure you've asked "{VERIFICATION_PROMPT}" in {providerName}.
              We're listening for the recall.
            </p>
          </div>
        </div>
      )}
    </div>
  );
}
@@ -53,7 +53,7 @@ export function NavUser({ user }: { user: ExtendedUser }) {
       <DropdownMenuSeparator />
       <DropdownMenuItem
         className="flex gap-2"
-        onClick={() => navigate("/settings/api")}
+        onClick={() => navigate("/settings/account")}
       >
         <Settings size={16} />
         Settings
@@ -99,7 +99,7 @@ export const SpaceOptions = ({ id, name, description }: SpaceOptionsProps) => {
     <DropdownMenuContent align="end">
       <DropdownMenuItem onClick={handleCopy}>
         <Button variant="link" size="sm" className="gap-2 rounded">
-          <Copy size={15} /> Copy ID
+          <Copy size={15} /> Copy Id
         </Button>
       </DropdownMenuItem>
       <DropdownMenuItem onClick={() => setEditDialogOpen(true)}>
@@ -149,7 +149,7 @@ export const ScrollAreaWithAutoScroll = ({
   className?: string;
 }) => {
   const { scrollRef } = useAutoScroll({
-    smooth: true,
+    smooth: false,
     content: children,
   });
 
@@ -161,7 +161,7 @@ export const ScrollAreaWithAutoScroll = ({
         className,
       )}
     >
-      <div className="flex h-full w-full max-w-[97ch] flex-col pb-4">
+      <div className="flex h-full w-full max-w-[80ch] flex-col pb-4">
        {children}
      </div>
    </div>
@@ -2,12 +2,9 @@ import { Prisma, PrismaClient } from "@core/database";
 import invariant from "tiny-invariant";
 import { z } from "zod";
 import { env } from "./env.server";
-import { logger } from "./services/logger.service";
 import { isValidDatabaseUrl } from "./utils/db";
 import { singleton } from "./utils/singleton";
 
-import { type Span } from "@opentelemetry/api";
-
 export { Prisma };
 
 export const prisma = singleton("prisma", getClient);
@@ -17,6 +17,7 @@ import { renderToPipeableStream } from "react-dom/server";
 import { initializeStartupServices } from "./utils/startup";
 import { handleMCPRequest, handleSessionRequest } from "~/services/mcp.server";
 import { authenticateHybridRequest } from "~/services/routeBuilders/apiBuilder.server";
+import { trackError } from "~/services/telemetry.server";
 
 const ABORT_DELAY = 5_000;
 
@@ -27,6 +28,42 @@ async function init() {
 
 init();
 
+/**
+ * Global error handler for all server-side errors
+ * This catches errors from loaders, actions, and rendering
+ * Automatically tracks all errors to telemetry
+ */
+export function handleError(
+  error: unknown,
+  { request }: { request: Request },
+): void {
+  // Don't track 404s or aborted requests as errors
+  if (
+    error instanceof Response &&
+    (error.status === 404 || error.status === 304)
+  ) {
+    return;
+  }
+
+  // Track error to telemetry
+  if (error instanceof Error) {
+    const url = new URL(request.url);
+    trackError(error, {
+      url: request.url,
+      path: url.pathname,
+      method: request.method,
+      userAgent: request.headers.get("user-agent") || "unknown",
+      referer: request.headers.get("referer") || undefined,
+    }).catch((trackingError) => {
+      // If telemetry tracking fails, just log it - don't break the app
+      console.error("Failed to track error:", trackingError);
+    });
+  }
+
+  // Always log to console for development/debugging
+  console.error(error);
+}
+
 export default function handleRequest(
   request: Request,
   responseStatusCode: number,
@ -3,98 +3,146 @@ import { isValidDatabaseUrl } from "./utils/db";
|
|||||||
import { isValidRegex } from "./utils/regex";
|
import { isValidRegex } from "./utils/regex";
|
||||||
import { LLMModelEnum } from "@core/types";
|
import { LLMModelEnum } from "@core/types";
|
||||||
|
|
||||||
const EnvironmentSchema = z.object({
|
const EnvironmentSchema = z
|
||||||
NODE_ENV: z.union([
|
.object({
|
||||||
z.literal("development"),
|
NODE_ENV: z.union([
|
||||||
z.literal("production"),
|
z.literal("development"),
|
||||||
z.literal("test"),
|
z.literal("production"),
|
||||||
]),
|
z.literal("test"),
|
||||||
POSTGRES_DB: z.string(),
|
]),
|
||||||
DATABASE_URL: z
|
POSTGRES_DB: z.string(),
|
||||||
.string()
|
DATABASE_URL: z
|
||||||
.refine(
|
.string()
|
||||||
isValidDatabaseUrl,
|
.refine(
|
||||||
"DATABASE_URL is invalid, for details please check the additional output above this message.",
|
isValidDatabaseUrl,
|
||||||
),
|
"DATABASE_URL is invalid, for details please check the additional output above this message.",
|
||||||
DATABASE_CONNECTION_LIMIT: z.coerce.number().int().default(10),
|
),
|
||||||
DATABASE_POOL_TIMEOUT: z.coerce.number().int().default(60),
|
DATABASE_CONNECTION_LIMIT: z.coerce.number().int().default(10),
|
||||||
DATABASE_CONNECTION_TIMEOUT: z.coerce.number().int().default(20),
|
DATABASE_POOL_TIMEOUT: z.coerce.number().int().default(60),
|
||||||
DIRECT_URL: z
|
DATABASE_CONNECTION_TIMEOUT: z.coerce.number().int().default(20),
|
||||||
.string()
|
DIRECT_URL: z
|
||||||
.refine(
|
.string()
|
||||||
isValidDatabaseUrl,
|
.refine(
|
||||||
"DIRECT_URL is invalid, for details please check the additional output above this message.",
|
isValidDatabaseUrl,
|
||||||
),
|
"DIRECT_URL is invalid, for details please check the additional output above this message.",
|
||||||
DATABASE_READ_REPLICA_URL: z.string().optional(),
|
),
|
||||||
SESSION_SECRET: z.string(),
|
DATABASE_READ_REPLICA_URL: z.string().optional(),
|
||||||
ENCRYPTION_KEY: z.string(),
|
SESSION_SECRET: z.string(),
|
||||||
MAGIC_LINK_SECRET: z.string(),
|
ENCRYPTION_KEY: z.string(),
|
||||||
WHITELISTED_EMAILS: z
|
MAGIC_LINK_SECRET: z.string(),
|
||||||
.string()
|
WHITELISTED_EMAILS: z
|
||||||
.refine(isValidRegex, "WHITELISTED_EMAILS must be a valid regex.")
|
.string()
|
||||||
.optional(),
|
.refine(isValidRegex, "WHITELISTED_EMAILS must be a valid regex.")
|
||||||
ADMIN_EMAILS: z
|
.optional(),
|
||||||
.string()
|
ADMIN_EMAILS: z
|
||||||
.refine(isValidRegex, "ADMIN_EMAILS must be a valid regex.")
|
.string()
|
||||||
.optional(),
|
.refine(isValidRegex, "ADMIN_EMAILS must be a valid regex.")
|
||||||
|
.optional(),
|
||||||
|
|
||||||
APP_ENV: z.string().default(process.env.NODE_ENV),
|
APP_ENV: z.string().default(process.env.NODE_ENV),
|
||||||
LOGIN_ORIGIN: z.string().default("http://localhost:5173"),
|
LOGIN_ORIGIN: z.string().default("http://localhost:5173"),
|
||||||
APP_ORIGIN: z.string().default("http://localhost:5173"),
|
APP_ORIGIN: z.string().default("http://localhost:5173"),
|
||||||
POSTHOG_PROJECT_KEY: z.string().default(""),
|
|
||||||
|
|
||||||
//storage
|
// Telemetry
|
||||||
ACCESS_KEY_ID: z.string().optional(),
|
POSTHOG_PROJECT_KEY: z
|
||||||
SECRET_ACCESS_KEY: z.string().optional(),
|
.string()
|
||||||
BUCKET: z.string().optional(),
|
.default("phc_SwfGIzzX5gh5bazVWoRxZTBhkr7FwvzArS0NRyGXm1a"),
|
||||||
|
TELEMETRY_ENABLED: z
|
||||||
|
.string()
|
||||||
|
.optional()
|
||||||
|
.default("true")
|
||||||
|
.transform((val) => val !== "false" && val !== "0"),
|
||||||
|
TELEMETRY_ANONYMOUS: z
|
||||||
|
.string()
|
||||||
|
.optional()
|
||||||
|
.default("false")
|
||||||
|
.transform((val) => val === "true" || val === "1"),
|
||||||
|
|
||||||
// google auth
|
//storage
|
||||||
AUTH_GOOGLE_CLIENT_ID: z.string().optional(),
|
ACCESS_KEY_ID: z.string().optional(),
|
||||||
AUTH_GOOGLE_CLIENT_SECRET: z.string().optional(),
|
SECRET_ACCESS_KEY: z.string().optional(),
|
||||||
|
BUCKET: z.string().optional(),
|
||||||
|
|
||||||
ENABLE_EMAIL_LOGIN: z.coerce.boolean().default(true),
|
// google auth
|
||||||
|
AUTH_GOOGLE_CLIENT_ID: z.string().optional(),
|
||||||
|
AUTH_GOOGLE_CLIENT_SECRET: z.string().optional(),
|
||||||
|
|
||||||
//Redis
|
ENABLE_EMAIL_LOGIN: z
|
||||||
REDIS_HOST: z.string().default("localhost"),
|
.string()
|
||||||
REDIS_PORT: z.coerce.number().default(6379),
|
.optional()
|
||||||
REDIS_TLS_DISABLED: z.coerce.boolean().default(true),
|
.default("true")
|
||||||
|
.transform((val) => val !== "false" && val !== "0"),
|
||||||
|
|
||||||
//Neo4j
|
//Redis
|
||||||
NEO4J_URI: z.string(),
|
REDIS_HOST: z.string().default("localhost"),
|
||||||
NEO4J_USERNAME: z.string(),
|
REDIS_PORT: z.coerce.number().default(6379),
|
||||||
NEO4J_PASSWORD: z.string(),
|
REDIS_TLS_DISABLED: z
|
||||||
|
.string()
|
||||||
|
.optional()
|
||||||
|
.default("true")
|
||||||
|
.transform((val) => val !== "false" && val !== "0"),
|
||||||
|
|
||||||
//OpenAI
|
//Neo4j
|
||||||
OPENAI_API_KEY: z.string(),
|
NEO4J_URI: z.string(),
|
||||||
|
NEO4J_USERNAME: z.string(),
|
||||||
|
NEO4J_PASSWORD: z.string(),
|
||||||
|
|
||||||
EMAIL_TRANSPORT: z.string().optional(),
|
//OpenAI
|
||||||
FROM_EMAIL: z.string().optional(),
|
OPENAI_API_KEY: z.string().optional(),
|
||||||
REPLY_TO_EMAIL: z.string().optional(),
|
ANTHROPIC_API_KEY: z.string().optional(),
|
||||||
RESEND_API_KEY: z.string().optional(),
|
GOOGLE_GENERATIVE_AI_API_KEY: z.string().optional(),
|
||||||
SMTP_HOST: z.string().optional(),
|
|
||||||
SMTP_PORT: z.coerce.number().optional(),
|
|
||||||
SMTP_SECURE: z.coerce.boolean().optional(),
|
|
||||||
SMTP_USER: z.string().optional(),
|
|
||||||
SMTP_PASSWORD: z.string().optional(),
|
|
||||||
|
|
||||||
//Trigger
|
EMAIL_TRANSPORT: z.string().optional(),
|
||||||
TRIGGER_PROJECT_ID: z.string(),
|
FROM_EMAIL: z.string().optional(),
|
||||||
TRIGGER_SECRET_KEY: z.string(),
|
REPLY_TO_EMAIL: z.string().optional(),
|
||||||
TRIGGER_API_URL: z.string(),
|
RESEND_API_KEY: z.string().optional(),
|
||||||
TRIGGER_DB: z.string().default("trigger"),
|
SMTP_HOST: z.string().optional(),
|
||||||
|
SMTP_PORT: z.coerce.number().optional(),
|
||||||
|
SMTP_SECURE: z
|
||||||
|
.string()
|
||||||
|
.optional()
|
||||||
|
.transform((val) => val === "true" || val === "1"),
|
||||||
|
SMTP_USER: z.string().optional(),
|
||||||
|
SMTP_PASSWORD: z.string().optional(),
|
||||||
|
|
||||||
// Model envs
|
//Trigger
|
||||||
MODEL: z.string().default(LLMModelEnum.GPT41),
|
TRIGGER_PROJECT_ID: z.string().optional(),
|
||||||
EMBEDDING_MODEL: z.string().default("mxbai-embed-large"),
|
TRIGGER_SECRET_KEY: z.string().optional(),
|
||||||
EMBEDDING_MODEL_SIZE: z.string().default("1024"),
|
TRIGGER_API_URL: z.string().optional(),
|
||||||
OLLAMA_URL: z.string().optional(),
|
TRIGGER_DB: z.string().default("trigger"),
|
||||||
COHERE_API_KEY: z.string().optional(),
|
|
||||||
COHERE_SCORE_THRESHOLD: z.string().default("0.3"),
|
|
||||||
|
|
||||||
AWS_ACCESS_KEY_ID: z.string().optional(),
|
// Model envs
|
||||||
AWS_SECRET_ACCESS_KEY: z.string().optional(),
|
MODEL: z.string().default(LLMModelEnum.GPT41),
|
||||||
AWS_REGION: z.string().optional(),
|
EMBEDDING_MODEL: z.string().default("mxbai-embed-large"),
|
||||||
});
|
EMBEDDING_MODEL_SIZE: z.string().default("1024"),
|
||||||
|
OLLAMA_URL: z.string().optional(),
|
||||||
|
COHERE_API_KEY: z.string().optional(),
|
||||||
|
COHERE_SCORE_THRESHOLD: z.string().default("0.3"),
|
||||||
|
|
||||||
|
AWS_ACCESS_KEY_ID: z.string().optional(),
|
||||||
|
AWS_SECRET_ACCESS_KEY: z.string().optional(),
|
||||||
|
AWS_REGION: z.string().optional(),
|
||||||
|
|
||||||
|
// Queue provider
|
||||||
|
QUEUE_PROVIDER: z.enum(["trigger", "bullmq"]).default("trigger"),
|
||||||
|
})
|
||||||
|
.refine(
|
||||||
|
(data) => {
|
||||||
|
// If QUEUE_PROVIDER is "trigger", then Trigger.dev variables must be present
|
||||||
|
if (data.QUEUE_PROVIDER === "trigger") {
|
||||||
|
return !!(
|
||||||
|
data.TRIGGER_PROJECT_ID &&
|
||||||
|
data.TRIGGER_SECRET_KEY &&
|
||||||
|
data.TRIGGER_API_URL
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
{
|
||||||
|
message:
|
||||||
|
"TRIGGER_PROJECT_ID, TRIGGER_SECRET_KEY, and TRIGGER_API_URL are required when QUEUE_PROVIDER=trigger",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
export type Environment = z.infer<typeof EnvironmentSchema>;
|
export type Environment = z.infer<typeof EnvironmentSchema>;
|
||||||
export const env = EnvironmentSchema.parse(process.env);
|
export const env = EnvironmentSchema.parse(process.env);
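The reworked schema above validates the queue configuration as a whole: an object-level .refine only accepts QUEUE_PROVIDER=trigger when all three Trigger.dev variables are present. A condensed zod sketch of that cross-field pattern (the QueueEnv name is illustrative, not part of this diff):

import { z } from "zod";

// Condensed sketch: "trigger" is only valid when all three Trigger.dev
// variables are set; "bullmq" needs none of them.
const QueueEnv = z
  .object({
    QUEUE_PROVIDER: z.enum(["trigger", "bullmq"]).default("trigger"),
    TRIGGER_PROJECT_ID: z.string().optional(),
    TRIGGER_SECRET_KEY: z.string().optional(),
    TRIGGER_API_URL: z.string().optional(),
  })
  .refine(
    (data) =>
      data.QUEUE_PROVIDER !== "trigger" ||
      !!(data.TRIGGER_PROJECT_ID && data.TRIGGER_SECRET_KEY && data.TRIGGER_API_URL),
    {
      message:
        "TRIGGER_PROJECT_ID, TRIGGER_SECRET_KEY, and TRIGGER_API_URL are required when QUEUE_PROVIDER=trigger",
    },
  );

// QueueEnv.parse({ QUEUE_PROVIDER: "bullmq" }) passes;
// QueueEnv.parse({ QUEUE_PROVIDER: "trigger" }) throws with the message above.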
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ import { useOptionalUser, useUserChanged } from "./useUser";
 
 export const usePostHog = (
   apiKey?: string,
+  telemetryEnabled = true,
   logging = false,
   debug = false,
 ): void => {
@@ -15,6 +16,8 @@ export const usePostHog = (
 
   //start PostHog once
   useEffect(() => {
+    // Respect telemetry settings
+    if (!telemetryEnabled) return;
     if (apiKey === undefined || apiKey === "") return;
     if (postHogInitialized.current === true) return;
     if (logging) console.log("Initializing PostHog");
@@ -27,19 +30,26 @@ export const usePostHog = (
         if (logging) console.log("PostHog loaded");
         if (user !== undefined) {
           if (logging) console.log("Loaded: Identifying user", user);
-          posthog.identify(user.id, { email: user.email });
+          posthog.identify(user.id, {
+            email: user.email,
+            name: user.name,
+          });
         }
       },
     });
     postHogInitialized.current = true;
-  }, [apiKey, logging, user]);
+  }, [apiKey, telemetryEnabled, logging, user]);
 
   useUserChanged((user) => {
     if (postHogInitialized.current === false) return;
+    if (!telemetryEnabled) return;
     if (logging) console.log("User changed");
     if (user) {
       if (logging) console.log("Identifying user", user);
-      posthog.identify(user.id, { email: user.email });
+      posthog.identify(user.id, {
+        email: user.email,
+        name: user.name,
+      });
     } else {
       if (logging) console.log("Resetting user");
       posthog.reset();
apps/webapp/app/jobs/bert/topic-analysis.logic.ts (Normal file, 250 lines)
@@ -0,0 +1,250 @@
|
|||||||
|
import { exec } from "child_process";
|
||||||
|
import { promisify } from "util";
|
||||||
|
import { identifySpacesForTopics } from "~/jobs/spaces/space-identification.logic";
|
||||||
|
import { assignEpisodesToSpace } from "~/services/graphModels/space";
|
||||||
|
import { logger } from "~/services/logger.service";
|
||||||
|
import { SpaceService } from "~/services/space.server";
|
||||||
|
import { prisma } from "~/trigger/utils/prisma";
|
||||||
|
|
||||||
|
const execAsync = promisify(exec);
|
||||||
|
|
||||||
|
export interface TopicAnalysisPayload {
|
||||||
|
userId: string;
|
||||||
|
workspaceId: string;
|
||||||
|
minTopicSize?: number;
|
||||||
|
nrTopics?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TopicAnalysisResult {
|
||||||
|
topics: {
|
||||||
|
[topicId: string]: {
|
||||||
|
keywords: string[];
|
||||||
|
episodeIds: string[];
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Run BERT analysis using exec (for BullMQ/Docker)
|
||||||
|
*/
|
||||||
|
async function runBertWithExec(
|
||||||
|
userId: string,
|
||||||
|
minTopicSize: number,
|
||||||
|
nrTopics?: number,
|
||||||
|
): Promise<string> {
|
||||||
|
let command = `python3 /core/apps/webapp/python/main.py ${userId} --json`;
|
||||||
|
|
||||||
|
if (minTopicSize) {
|
||||||
|
command += ` --min-topic-size ${minTopicSize}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (nrTopics) {
|
||||||
|
command += ` --nr-topics ${nrTopics}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`[BERT Topic Analysis] Executing: ${command}`);
|
||||||
|
|
||||||
|
const { stdout, stderr } = await execAsync(command, {
|
||||||
|
timeout: 300000, // 5 minutes
|
||||||
|
maxBuffer: 10 * 1024 * 1024, // 10MB buffer for large outputs
|
||||||
|
});
|
||||||
|
|
||||||
|
if (stderr) {
|
||||||
|
console.warn(`[BERT Topic Analysis] Warnings:`, stderr);
|
||||||
|
}
|
||||||
|
|
||||||
|
return stdout;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process BERT topic analysis on user's episodes
|
||||||
|
* This is the common logic shared between Trigger.dev and BullMQ
|
||||||
|
*
|
||||||
|
* NOTE: This function does NOT update workspace.metadata.lastTopicAnalysisAt
|
||||||
|
* That should be done by the caller BEFORE enqueueing this job to prevent
|
||||||
|
* duplicate analyses from racing conditions.
|
||||||
|
*/
|
||||||
|
export async function processTopicAnalysis(
|
||||||
|
payload: TopicAnalysisPayload,
|
||||||
|
enqueueSpaceSummary?: (params: {
|
||||||
|
spaceId: string;
|
||||||
|
userId: string;
|
||||||
|
}) => Promise<any>,
|
||||||
|
pythonRunner?: (
|
||||||
|
userId: string,
|
||||||
|
minTopicSize: number,
|
||||||
|
nrTopics?: number,
|
||||||
|
) => Promise<string>,
|
||||||
|
): Promise<TopicAnalysisResult> {
|
||||||
|
const { userId, workspaceId, minTopicSize = 10, nrTopics } = payload;
|
||||||
|
|
||||||
|
console.log(`[BERT Topic Analysis] Starting analysis for user: ${userId}`);
|
||||||
|
console.log(
|
||||||
|
`[BERT Topic Analysis] Parameters: minTopicSize=${minTopicSize}, nrTopics=${nrTopics || "auto"}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const startTime = Date.now();
|
||||||
|
|
||||||
|
// Run BERT analysis using provided runner or default exec
|
||||||
|
const runner = pythonRunner || runBertWithExec;
|
||||||
|
const stdout = await runner(userId, minTopicSize, nrTopics);
|
||||||
|
|
||||||
|
const duration = Date.now() - startTime;
|
||||||
|
console.log(`[BERT Topic Analysis] Completed in ${duration}ms`);
|
||||||
|
|
||||||
|
// Parse the JSON output
|
||||||
|
const result: TopicAnalysisResult = JSON.parse(stdout);
|
||||||
|
|
||||||
|
// Log summary
|
||||||
|
const topicCount = Object.keys(result.topics).length;
|
||||||
|
const totalEpisodes = Object.values(result.topics).reduce(
|
||||||
|
(sum, topic) => sum + topic.episodeIds.length,
|
||||||
|
0,
|
||||||
|
);
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`[BERT Topic Analysis] Found ${topicCount} topics covering ${totalEpisodes} episodes`,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Step 2: Identify spaces for topics using LLM
|
||||||
|
try {
|
||||||
|
logger.info("[BERT Topic Analysis] Starting space identification", {
|
||||||
|
userId,
|
||||||
|
topicCount,
|
||||||
|
});
|
||||||
|
|
||||||
|
const spaceProposals = await identifySpacesForTopics({
|
||||||
|
userId,
|
||||||
|
topics: result.topics,
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info("[BERT Topic Analysis] Space identification completed", {
|
||||||
|
userId,
|
||||||
|
proposalCount: spaceProposals.length,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Step 3: Create or find spaces and assign episodes
|
||||||
|
// Get existing spaces from PostgreSQL
|
||||||
|
const existingSpacesFromDb = await prisma.space.findMany({
|
||||||
|
where: { workspaceId },
|
||||||
|
});
|
||||||
|
const existingSpacesByName = new Map(
|
||||||
|
existingSpacesFromDb.map((s) => [s.name.toLowerCase(), s]),
|
||||||
|
);
|
||||||
|
|
||||||
|
for (const proposal of spaceProposals) {
|
||||||
|
try {
|
||||||
|
// Check if space already exists (case-insensitive match)
|
||||||
|
let spaceId: string;
|
||||||
|
const existingSpace = existingSpacesByName.get(
|
||||||
|
proposal.name.toLowerCase(),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (existingSpace) {
|
||||||
|
// Use existing space
|
||||||
|
spaceId = existingSpace.id;
|
||||||
|
logger.info("[BERT Topic Analysis] Using existing space", {
|
||||||
|
spaceName: proposal.name,
|
||||||
|
spaceId,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// Create new space (creates in both PostgreSQL and Neo4j)
|
||||||
|
// Skip automatic space assignment since we're manually assigning from BERT topics
|
||||||
|
const spaceService = new SpaceService();
|
||||||
|
const newSpace = await spaceService.createSpace({
|
||||||
|
name: proposal.name,
|
||||||
|
description: proposal.intent,
|
||||||
|
userId,
|
||||||
|
workspaceId,
|
||||||
|
});
|
||||||
|
spaceId = newSpace.id;
|
||||||
|
logger.info("[BERT Topic Analysis] Created new space", {
|
||||||
|
spaceName: proposal.name,
|
||||||
|
spaceId,
|
||||||
|
intent: proposal.intent,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Collect all episode IDs from the topics in this proposal
|
||||||
|
const episodeIds: string[] = [];
|
||||||
|
for (const topicId of proposal.topics) {
|
||||||
|
const topic = result.topics[topicId];
|
||||||
|
if (topic) {
|
||||||
|
episodeIds.push(...topic.episodeIds);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Assign all episodes from these topics to the space
|
||||||
|
if (episodeIds.length > 0) {
|
||||||
|
await assignEpisodesToSpace(episodeIds, spaceId, userId);
|
||||||
|
logger.info("[BERT Topic Analysis] Assigned episodes to space", {
|
||||||
|
spaceName: proposal.name,
|
||||||
|
spaceId,
|
||||||
|
episodeCount: episodeIds.length,
|
||||||
|
topics: proposal.topics,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Step 4: Trigger space summary if callback provided
|
||||||
|
if (enqueueSpaceSummary) {
|
||||||
|
await enqueueSpaceSummary({ spaceId, userId });
|
||||||
|
logger.info("[BERT Topic Analysis] Triggered space summary", {
|
||||||
|
spaceName: proposal.name,
|
||||||
|
spaceId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (spaceError) {
|
||||||
|
logger.error(
|
||||||
|
"[BERT Topic Analysis] Failed to process space proposal",
|
||||||
|
{
|
||||||
|
proposal,
|
||||||
|
error: spaceError,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
// Continue with other proposals
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (spaceIdentificationError) {
|
||||||
|
logger.error(
|
||||||
|
"[BERT Topic Analysis] Space identification failed, returning topics only",
|
||||||
|
{
|
||||||
|
error: spaceIdentificationError,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
// Return topics even if space identification fails
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`[BERT Topic Analysis] Error:`, error);
|
||||||
|
|
||||||
|
if (error instanceof Error) {
|
||||||
|
// Check for timeout
|
||||||
|
if (error.message.includes("ETIMEDOUT")) {
|
||||||
|
throw new Error(
|
||||||
|
`Topic analysis timed out after 5 minutes. User may have too many episodes.`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for Python errors
|
||||||
|
if (error.message.includes("python3: not found")) {
|
||||||
|
throw new Error(`Python 3 is not installed or not available in PATH.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for Neo4j connection errors
|
||||||
|
if (error.message.includes("Failed to connect to Neo4j")) {
|
||||||
|
throw new Error(
|
||||||
|
`Could not connect to Neo4j. Check NEO4J_URI and credentials.`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for no episodes
|
||||||
|
if (error.message.includes("No episodes found")) {
|
||||||
|
throw new Error(`No episodes found for userId: ${userId}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
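A minimal sketch of how a queue worker might invoke processTopicAnalysis from the file above, wiring in its optional enqueueSpaceSummary callback (spaceSummaryQueue is an assumed BullMQ queue, not code from this diff):

// Sketch only: hand space-summary follow-ups back to an assumed queue.
async function runTopicAnalysisJob(userId: string, workspaceId: string) {
  return processTopicAnalysis(
    { userId, workspaceId, minTopicSize: 10 },
    // Optional callback from the signature above.
    async ({ spaceId, userId: uid }) =>
      spaceSummaryQueue.add("space-summary", { spaceId, userId: uid }),
  );
}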
apps/webapp/app/jobs/conversation/create-title.logic.ts (Normal file, 82 lines)
@@ -0,0 +1,82 @@
import { conversationTitlePrompt } from "~/trigger/conversation/prompt";
import { prisma } from "~/trigger/utils/prisma";
import { logger } from "~/services/logger.service";
import { generateText, type LanguageModel } from "ai";
import { getModel } from "~/lib/model.server";

export interface CreateConversationTitlePayload {
  conversationId: string;
  message: string;
}

export interface CreateConversationTitleResult {
  success: boolean;
  title?: string;
  error?: string;
}

/**
 * Core business logic for creating conversation titles
 * This is shared between Trigger.dev and BullMQ implementations
 */
export async function processConversationTitleCreation(
  payload: CreateConversationTitlePayload,
): Promise<CreateConversationTitleResult> {
  try {
    let conversationTitleResponse = "";
    const { text } = await generateText({
      model: getModel() as LanguageModel,
      messages: [
        {
          role: "user",
          content: conversationTitlePrompt.replace(
            "{{message}}",
            payload.message,
          ),
        },
      ],
    });

    const outputMatch = text.match(/<output>(.*?)<\/output>/s);

    logger.info(`Conversation title data: ${JSON.stringify(outputMatch)}`);

    if (!outputMatch) {
      logger.error("No output found in recurrence response");
      throw new Error("Invalid response format from AI");
    }

    const jsonStr = outputMatch[1].trim();
    const conversationTitleData = JSON.parse(jsonStr);

    if (conversationTitleData) {
      await prisma.conversation.update({
        where: {
          id: payload.conversationId,
        },
        data: {
          title: conversationTitleData.title,
        },
      });

      return {
        success: true,
        title: conversationTitleData.title,
      };
    }

    return {
      success: false,
      error: "No title generated",
    };
  } catch (error: any) {
    logger.error(
      `Error creating conversation title for ${payload.conversationId}:`,
      error,
    );
    return {
      success: false,
      error: error.message,
    };
  }
}
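Since the logic above is shared between Trigger.dev and BullMQ, a BullMQ worker delegating to it might look roughly like this (the queue name, connection settings, and file placement are assumptions, not code from this diff):

import { Worker } from "bullmq";
import { processConversationTitleCreation } from "~/jobs/conversation/create-title.logic";

// Sketch only: one worker per queue; BullMQ retries the job if the processor throws.
const conversationTitleWorker = new Worker(
  "conversation-title",
  async (job) => {
    const result = await processConversationTitleCreation(job.data);
    if (!result.success) {
      throw new Error(result.error ?? "Title creation failed");
    }
    return result;
  },
  {
    connection: {
      host: process.env.REDIS_HOST,
      port: Number(process.env.REDIS_PORT ?? 6379),
    },
  },
);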
apps/webapp/app/jobs/ingest/ingest-document.logic.ts (Normal file, 290 lines)
@@ -0,0 +1,290 @@
|
|||||||
|
import { type z } from "zod";
|
||||||
|
|
||||||
|
import { IngestionStatus } from "@core/database";
|
||||||
|
import { EpisodeTypeEnum } from "@core/types";
|
||||||
|
import { logger } from "~/services/logger.service";
|
||||||
|
import { saveDocument } from "~/services/graphModels/document";
|
||||||
|
|
||||||
|
import { DocumentVersioningService } from "~/services/documentVersioning.server";
|
||||||
|
import { DocumentDifferentialService } from "~/services/documentDiffer.server";
|
||||||
|
import { KnowledgeGraphService } from "~/services/knowledgeGraph.server";
|
||||||
|
import { prisma } from "~/trigger/utils/prisma";
|
||||||
|
import { type IngestBodyRequest } from "./ingest-episode.logic";
|
||||||
|
|
||||||
|
export interface IngestDocumentPayload {
|
||||||
|
body: z.infer<typeof IngestBodyRequest>;
|
||||||
|
userId: string;
|
||||||
|
workspaceId: string;
|
||||||
|
queueId: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IngestDocumentResult {
|
||||||
|
success: boolean;
|
||||||
|
error?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Core business logic for document ingestion with differential processing
|
||||||
|
* This is shared between Trigger.dev and BullMQ implementations
|
||||||
|
*
|
||||||
|
* Note: This function should NOT call trigger functions directly for chunk processing.
|
||||||
|
* Instead, use the enqueueEpisodeIngestion callback to queue episode ingestion jobs.
|
||||||
|
*/
|
||||||
|
export async function processDocumentIngestion(
|
||||||
|
payload: IngestDocumentPayload,
|
||||||
|
// Callback function for enqueueing episode ingestion for each chunk
|
||||||
|
enqueueEpisodeIngestion?: (params: {
|
||||||
|
body: any;
|
||||||
|
userId: string;
|
||||||
|
workspaceId: string;
|
||||||
|
queueId: string;
|
||||||
|
}) => Promise<{ id?: string }>,
|
||||||
|
): Promise<IngestDocumentResult> {
|
||||||
|
const startTime = Date.now();
|
||||||
|
|
||||||
|
try {
|
||||||
|
logger.log(`Processing document for user ${payload.userId}`, {
|
||||||
|
contentLength: payload.body.episodeBody.length,
|
||||||
|
});
|
||||||
|
|
||||||
|
await prisma.ingestionQueue.update({
|
||||||
|
where: { id: payload.queueId },
|
||||||
|
data: {
|
||||||
|
status: IngestionStatus.PROCESSING,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const documentBody = payload.body;
|
||||||
|
|
||||||
|
// Step 1: Initialize services and prepare document version
|
||||||
|
const versioningService = new DocumentVersioningService();
|
||||||
|
const differentialService = new DocumentDifferentialService();
|
||||||
|
const knowledgeGraphService = new KnowledgeGraphService();
|
||||||
|
|
||||||
|
const {
|
||||||
|
documentNode: document,
|
||||||
|
versionInfo,
|
||||||
|
chunkedDocument,
|
||||||
|
} = await versioningService.prepareDocumentVersion(
|
||||||
|
documentBody.sessionId!,
|
||||||
|
payload.userId,
|
||||||
|
documentBody.metadata?.documentTitle?.toString() || "Untitled Document",
|
||||||
|
documentBody.episodeBody,
|
||||||
|
documentBody.source,
|
||||||
|
documentBody.metadata || {},
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.log(`Document version analysis:`, {
|
||||||
|
version: versionInfo.newVersion,
|
||||||
|
isNewDocument: versionInfo.isNewDocument,
|
||||||
|
hasContentChanged: versionInfo.hasContentChanged,
|
||||||
|
changePercentage: versionInfo.chunkLevelChanges.changePercentage,
|
||||||
|
changedChunks: versionInfo.chunkLevelChanges.changedChunkIndices.length,
|
||||||
|
totalChunks: versionInfo.chunkLevelChanges.totalChunks,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Step 2: Determine processing strategy
|
||||||
|
const differentialDecision =
|
||||||
|
await differentialService.analyzeDifferentialNeed(
|
||||||
|
documentBody.episodeBody,
|
||||||
|
versionInfo.existingDocument,
|
||||||
|
chunkedDocument,
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.log(`Differential analysis:`, {
|
||||||
|
shouldUseDifferential: differentialDecision.shouldUseDifferential,
|
||||||
|
strategy: differentialDecision.strategy,
|
||||||
|
reason: differentialDecision.reason,
|
||||||
|
documentSizeTokens: differentialDecision.documentSizeTokens,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Early return for unchanged documents
|
||||||
|
if (differentialDecision.strategy === "skip_processing") {
|
||||||
|
logger.log("Document content unchanged, skipping processing");
|
||||||
|
await prisma.ingestionQueue.update({
|
||||||
|
where: { id: payload.queueId },
|
||||||
|
data: {
|
||||||
|
status: IngestionStatus.COMPLETED,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 3: Save the new document version
|
||||||
|
await saveDocument(document);
|
||||||
|
|
||||||
|
// Step 3.1: Invalidate statements from previous document version if it exists
|
||||||
|
let invalidationResults = null;
|
||||||
|
if (versionInfo.existingDocument && versionInfo.hasContentChanged) {
|
||||||
|
logger.log(
|
||||||
|
`Invalidating statements from previous document version: ${versionInfo.existingDocument.uuid}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
invalidationResults =
|
||||||
|
await knowledgeGraphService.invalidateStatementsFromPreviousDocumentVersion(
|
||||||
|
{
|
||||||
|
previousDocumentUuid: versionInfo.existingDocument.uuid,
|
||||||
|
newDocumentContent: documentBody.episodeBody,
|
||||||
|
userId: payload.userId,
|
||||||
|
invalidatedBy: document.uuid,
|
||||||
|
semanticSimilarityThreshold: 0.75, // Configurable threshold
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.log(`Statement invalidation completed:`, {
|
||||||
|
totalAnalyzed: invalidationResults.totalStatementsAnalyzed,
|
||||||
|
invalidated: invalidationResults.invalidatedStatements.length,
|
||||||
|
preserved: invalidationResults.preservedStatements.length,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.log(`Document chunked into ${chunkedDocument.chunks.length} chunks`);
|
||||||
|
|
||||||
|
// Step 4: Process chunks based on differential strategy
|
||||||
|
let chunksToProcess = chunkedDocument.chunks;
|
||||||
|
let processingMode = "full";
|
||||||
|
|
||||||
|
if (
|
||||||
|
differentialDecision.shouldUseDifferential &&
|
||||||
|
differentialDecision.strategy === "chunk_level_diff"
|
||||||
|
) {
|
||||||
|
// Only process changed chunks
|
||||||
|
const chunkComparisons = differentialService.getChunkComparisons(
|
||||||
|
versionInfo.existingDocument!,
|
||||||
|
chunkedDocument,
|
||||||
|
);
|
||||||
|
|
||||||
|
const changedIndices =
|
||||||
|
differentialService.getChunksNeedingReprocessing(chunkComparisons);
|
||||||
|
chunksToProcess = chunkedDocument.chunks.filter((chunk) =>
|
||||||
|
changedIndices.includes(chunk.chunkIndex),
|
||||||
|
);
|
||||||
|
processingMode = "differential";
|
||||||
|
|
||||||
|
logger.log(
|
||||||
|
`Differential processing: ${chunksToProcess.length}/${chunkedDocument.chunks.length} chunks need reprocessing`,
|
||||||
|
);
|
||||||
|
} else if (differentialDecision.strategy === "full_reingest") {
|
||||||
|
// Process all chunks
|
||||||
|
processingMode = "full";
|
||||||
|
logger.log(
|
||||||
|
`Full reingestion: processing all ${chunkedDocument.chunks.length} chunks`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 5: Queue chunks for processing
|
||||||
|
const episodeHandlers = [];
|
||||||
|
if (enqueueEpisodeIngestion) {
|
||||||
|
for (const chunk of chunksToProcess) {
|
||||||
|
const chunkEpisodeData = {
|
||||||
|
episodeBody: chunk.content,
|
||||||
|
referenceTime: documentBody.referenceTime,
|
||||||
|
metadata: {
|
||||||
|
...documentBody.metadata,
|
||||||
|
processingMode,
|
||||||
|
differentialStrategy: differentialDecision.strategy,
|
||||||
|
chunkHash: chunk.contentHash,
|
||||||
|
documentTitle:
|
||||||
|
documentBody.metadata?.documentTitle?.toString() ||
|
||||||
|
"Untitled Document",
|
||||||
|
chunkIndex: chunk.chunkIndex,
|
||||||
|
documentUuid: document.uuid,
|
||||||
|
},
|
||||||
|
source: documentBody.source,
|
||||||
|
spaceIds: documentBody.spaceIds,
|
||||||
|
sessionId: documentBody.sessionId,
|
||||||
|
type: EpisodeTypeEnum.DOCUMENT,
|
||||||
|
};
|
||||||
|
|
||||||
|
const episodeHandler = await enqueueEpisodeIngestion({
|
||||||
|
body: chunkEpisodeData,
|
||||||
|
userId: payload.userId,
|
||||||
|
workspaceId: payload.workspaceId,
|
||||||
|
queueId: payload.queueId,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (episodeHandler.id) {
|
||||||
|
episodeHandlers.push(episodeHandler.id);
|
||||||
|
logger.log(
|
||||||
|
`Queued chunk ${chunk.chunkIndex + 1} for ${processingMode} processing`,
|
||||||
|
{
|
||||||
|
handlerId: episodeHandler.id,
|
||||||
|
chunkSize: chunk.content.length,
|
||||||
|
chunkHash: chunk.contentHash,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate cost savings
|
||||||
|
const costSavings = differentialService.calculateCostSavings(
|
||||||
|
chunkedDocument.chunks.length,
|
||||||
|
chunksToProcess.length,
|
||||||
|
);
|
||||||
|
|
||||||
|
await prisma.ingestionQueue.update({
|
||||||
|
where: { id: payload.queueId },
|
||||||
|
data: {
|
||||||
|
output: {
|
||||||
|
documentUuid: document.uuid,
|
||||||
|
version: versionInfo.newVersion,
|
||||||
|
totalChunks: chunkedDocument.chunks.length,
|
||||||
|
chunksProcessed: chunksToProcess.length,
|
||||||
|
chunksSkipped: costSavings.chunksSkipped,
|
||||||
|
processingMode,
|
||||||
|
differentialStrategy: differentialDecision.strategy,
|
||||||
|
estimatedSavings: `${costSavings.estimatedSavingsPercentage.toFixed(1)}%`,
|
||||||
|
statementInvalidation: invalidationResults
|
||||||
|
? {
|
||||||
|
totalAnalyzed: invalidationResults.totalStatementsAnalyzed,
|
||||||
|
invalidated: invalidationResults.invalidatedStatements.length,
|
||||||
|
preserved: invalidationResults.preservedStatements.length,
|
||||||
|
}
|
||||||
|
: null,
|
||||||
|
episodes: [],
|
||||||
|
episodeHandlers,
|
||||||
|
},
|
||||||
|
status: IngestionStatus.PROCESSING,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const processingTimeMs = Date.now() - startTime;
|
||||||
|
|
||||||
|
logger.log(
|
||||||
|
`Document differential processing completed in ${processingTimeMs}ms`,
|
||||||
|
{
|
||||||
|
documentUuid: document.uuid,
|
||||||
|
version: versionInfo.newVersion,
|
||||||
|
processingMode,
|
||||||
|
totalChunks: chunkedDocument.chunks.length,
|
||||||
|
chunksProcessed: chunksToProcess.length,
|
||||||
|
chunksSkipped: costSavings.chunksSkipped,
|
||||||
|
estimatedSavings: `${costSavings.estimatedSavingsPercentage.toFixed(1)}%`,
|
||||||
|
changePercentage: `${differentialDecision.changePercentage.toFixed(1)}%`,
|
||||||
|
statementInvalidation: invalidationResults
|
||||||
|
? {
|
||||||
|
totalAnalyzed: invalidationResults.totalStatementsAnalyzed,
|
||||||
|
invalidated: invalidationResults.invalidatedStatements.length,
|
||||||
|
preserved: invalidationResults.preservedStatements.length,
|
||||||
|
}
|
||||||
|
: "No previous version",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
return { success: true };
|
||||||
|
} catch (err: any) {
|
||||||
|
await prisma.ingestionQueue.update({
|
||||||
|
where: { id: payload.queueId },
|
||||||
|
data: {
|
||||||
|
error: err.message,
|
||||||
|
status: IngestionStatus.FAILED,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.error(`Error processing document for user ${payload.userId}:`, err);
|
||||||
|
return { success: false, error: err.message };
|
||||||
|
}
|
||||||
|
}
|
||||||
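A minimal sketch of calling processDocumentIngestion from the file above with the enqueueEpisodeIngestion callback it expects (episodeQueue is an assumed BullMQ queue for episode-ingestion jobs, not code from this diff):

// Sketch only: each changed chunk is fanned out as its own episode-ingestion job.
async function handleIngestDocumentJob(payload: IngestDocumentPayload) {
  return processDocumentIngestion(payload, async (params) => {
    const queued = await episodeQueue.add("ingest-episode", params);
    return { id: queued.id };
  });
}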
apps/webapp/app/jobs/ingest/ingest-episode.logic.ts (Normal file, 314 lines)
@@ -0,0 +1,314 @@
|
|||||||
|
import { z } from "zod";
import { KnowledgeGraphService } from "~/services/knowledgeGraph.server";
import { linkEpisodeToDocument } from "~/services/graphModels/document";
import { IngestionStatus } from "@core/database";
import { logger } from "~/services/logger.service";
import { prisma } from "~/trigger/utils/prisma";
import { EpisodeType } from "@core/types";
import { deductCredits, hasCredits } from "~/trigger/utils/utils";
import { assignEpisodesToSpace } from "~/services/graphModels/space";
import {
  shouldTriggerTopicAnalysis,
  updateLastTopicAnalysisTime,
} from "~/services/bertTopicAnalysis.server";

export const IngestBodyRequest = z.object({
  episodeBody: z.string(),
  referenceTime: z.string(),
  metadata: z.record(z.union([z.string(), z.number(), z.boolean()])).optional(),
  source: z.string(),
  spaceIds: z.array(z.string()).optional(),
  sessionId: z.string().optional(),
  type: z
    .enum([EpisodeType.CONVERSATION, EpisodeType.DOCUMENT])
    .default(EpisodeType.CONVERSATION),
});

export interface IngestEpisodePayload {
  body: z.infer<typeof IngestBodyRequest>;
  userId: string;
  workspaceId: string;
  queueId: string;
}

export interface IngestEpisodeResult {
  success: boolean;
  episodeDetails?: any;
  error?: string;
}

/**
 * Core business logic for ingesting a single episode
 * This is shared between Trigger.dev and BullMQ implementations
 *
 * Note: This function should NOT call trigger functions directly.
 * Instead, return data that indicates follow-up jobs are needed,
 * and let the caller (Trigger task or BullMQ worker) handle job queueing.
 */
export async function processEpisodeIngestion(
  payload: IngestEpisodePayload,
  // Callback functions for enqueueing follow-up jobs
  enqueueSpaceAssignment?: (params: {
    userId: string;
    workspaceId: string;
    mode: "episode";
    episodeIds: string[];
  }) => Promise<any>,
  enqueueSessionCompaction?: (params: {
    userId: string;
    sessionId: string;
    source: string;
  }) => Promise<any>,
  enqueueBertTopicAnalysis?: (params: {
    userId: string;
    workspaceId: string;
    minTopicSize?: number;
    nrTopics?: number;
  }) => Promise<any>,
): Promise<IngestEpisodeResult> {
  try {
    logger.log(`Processing job for user ${payload.userId}`);

    // Check if workspace has sufficient credits before processing
    const hasSufficientCredits = await hasCredits(
      payload.workspaceId,
      "addEpisode",
    );

    if (!hasSufficientCredits) {
      logger.warn(`Insufficient credits for workspace ${payload.workspaceId}`);

      await prisma.ingestionQueue.update({
        where: { id: payload.queueId },
        data: {
          status: IngestionStatus.NO_CREDITS,
          error:
            "Insufficient credits. Please upgrade your plan or wait for your credits to reset.",
        },
      });

      return {
        success: false,
        error: "Insufficient credits",
      };
    }

    const ingestionQueue = await prisma.ingestionQueue.update({
      where: { id: payload.queueId },
      data: {
        status: IngestionStatus.PROCESSING,
      },
    });

    const knowledgeGraphService = new KnowledgeGraphService();

    const episodeBody = payload.body as any;

    const episodeDetails = await knowledgeGraphService.addEpisode(
      {
        ...episodeBody,
        userId: payload.userId,
      },
      prisma,
    );

    // Link episode to document if it's a document chunk
    if (
      episodeBody.type === EpisodeType.DOCUMENT &&
      episodeBody.metadata.documentUuid &&
      episodeDetails.episodeUuid
    ) {
      try {
        await linkEpisodeToDocument(
          episodeDetails.episodeUuid,
          episodeBody.metadata.documentUuid,
          episodeBody.metadata.chunkIndex || 0,
        );
        logger.log(
          `Linked episode ${episodeDetails.episodeUuid} to document ${episodeBody.metadata.documentUuid} at chunk ${episodeBody.metadata.chunkIndex || 0}`,
        );
      } catch (error) {
        logger.error(`Failed to link episode to document:`, {
          error,
          episodeUuid: episodeDetails.episodeUuid,
          documentUuid: episodeBody.metadata.documentUuid,
        });
      }
    }

    let finalOutput = episodeDetails;
    let episodeUuids: string[] = episodeDetails.episodeUuid
      ? [episodeDetails.episodeUuid]
      : [];
    let currentStatus: IngestionStatus = IngestionStatus.COMPLETED;
    if (episodeBody.type === EpisodeType.DOCUMENT) {
      const currentOutput = ingestionQueue.output as any;
      currentOutput.episodes.push(episodeDetails);
      episodeUuids = currentOutput.episodes.map(
        (episode: any) => episode.episodeUuid,
      );

      finalOutput = {
        ...currentOutput,
      };

      if (currentOutput.episodes.length !== currentOutput.totalChunks) {
        currentStatus = IngestionStatus.PROCESSING;
      }
    }

    await prisma.ingestionQueue.update({
      where: { id: payload.queueId },
      data: {
        output: finalOutput,
        status: currentStatus,
      },
    });

    // Deduct credits for episode creation
    if (currentStatus === IngestionStatus.COMPLETED) {
      await deductCredits(
        payload.workspaceId,
        "addEpisode",
        finalOutput.statementsCreated,
      );
    }

    // Handle space assignment after successful ingestion
    try {
      // If spaceIds were explicitly provided, immediately assign the episode to those spaces
      if (
        episodeBody.spaceIds &&
        episodeBody.spaceIds.length > 0 &&
        episodeDetails.episodeUuid
      ) {
        logger.info(`Assigning episode to explicitly provided spaces`, {
          userId: payload.userId,
          episodeId: episodeDetails.episodeUuid,
          spaceIds: episodeBody.spaceIds,
        });

        // Assign episode to each space
        for (const spaceId of episodeBody.spaceIds) {
          await assignEpisodesToSpace(
            [episodeDetails.episodeUuid],
            spaceId,
            payload.userId,
          );
        }

        logger.info(
          `Skipping LLM space assignment - episode explicitly assigned to ${episodeBody.spaceIds.length} space(s)`,
        );
      } else {
        // Only trigger automatic LLM space assignment if no explicit spaceIds were provided
        logger.info(
          `Triggering LLM space assignment after successful ingestion`,
          {
            userId: payload.userId,
            workspaceId: payload.workspaceId,
            episodeId: episodeDetails?.episodeUuid,
          },
        );
        if (
          episodeDetails.episodeUuid &&
          currentStatus === IngestionStatus.COMPLETED &&
          enqueueSpaceAssignment
        ) {
          await enqueueSpaceAssignment({
            userId: payload.userId,
            workspaceId: payload.workspaceId,
            mode: "episode",
            episodeIds: episodeUuids,
          });
        }
      }
    } catch (assignmentError) {
      // Don't fail the ingestion if assignment fails
      logger.warn(`Failed to trigger space assignment after ingestion:`, {
        error: assignmentError,
        userId: payload.userId,
        episodeId: episodeDetails?.episodeUuid,
      });
    }

    // Auto-trigger session compaction if episode has sessionId
    try {
      if (
        episodeBody.sessionId &&
        currentStatus === IngestionStatus.COMPLETED &&
        enqueueSessionCompaction
      ) {
        logger.info(`Checking if session compaction should be triggered`, {
          userId: payload.userId,
          sessionId: episodeBody.sessionId,
          source: episodeBody.source,
        });

        await enqueueSessionCompaction({
          userId: payload.userId,
          sessionId: episodeBody.sessionId,
          source: episodeBody.source,
        });
      }
    } catch (compactionError) {
      // Don't fail the ingestion if compaction fails
      logger.warn(`Failed to trigger session compaction after ingestion:`, {
        error: compactionError,
        userId: payload.userId,
        sessionId: episodeBody.sessionId,
      });
    }

    // Auto-trigger BERT topic analysis if threshold met (20+ new episodes)
    try {
      if (
        currentStatus === IngestionStatus.COMPLETED &&
        enqueueBertTopicAnalysis
      ) {
        const shouldTrigger = await shouldTriggerTopicAnalysis(
          payload.userId,
          payload.workspaceId,
        );

        if (shouldTrigger) {
          logger.info(
            `Triggering BERT topic analysis after reaching 20+ new episodes`,
            {
              userId: payload.userId,
              workspaceId: payload.workspaceId,
            },
          );

          await enqueueBertTopicAnalysis({
            userId: payload.userId,
            workspaceId: payload.workspaceId,
            minTopicSize: 10,
          });

          // Update the last analysis timestamp
          await updateLastTopicAnalysisTime(payload.workspaceId);
        }
      }
    } catch (topicAnalysisError) {
      // Don't fail the ingestion if topic analysis fails
      logger.warn(`Failed to trigger topic analysis after ingestion:`, {
        error: topicAnalysisError,
        userId: payload.userId,
      });
    }

    return { success: true, episodeDetails };
  } catch (err: any) {
    await prisma.ingestionQueue.update({
      where: { id: payload.queueId },
      data: {
        error: err.message,
        status: IngestionStatus.FAILED,
      },
    });

    logger.error(`Error processing job for user ${payload.userId}:`, err);
    return { success: false, error: err.message };
  }
}
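The doc comment above describes a callback-based design: the shared logic never enqueues follow-up jobs itself; the caller supplies enqueue callbacks. A minimal sketch of how a BullMQ worker might wire those callbacks is below; the queue names, Redis connection, and file layout are illustrative assumptions, not part of this change.

```ts
// Hypothetical BullMQ wiring for processEpisodeIngestion.
// Queue names and connection details are assumptions for illustration only.
import { Queue, Worker } from "bullmq";
import { processEpisodeIngestion } from "./ingest-episode.logic";

const connection = { host: process.env.REDIS_HOST ?? "localhost", port: 6379 };

const spaceAssignmentQueue = new Queue("space-assignment", { connection });
const sessionCompactionQueue = new Queue("session-compaction", { connection });
const bertTopicQueue = new Queue("bert-topic-analysis", { connection });

// The worker passes enqueue callbacks instead of letting the logic call
// trigger functions directly, as the note in the doc comment requires.
new Worker(
  "ingest-episode",
  async (job) =>
    processEpisodeIngestion(
      job.data, // expected to match IngestEpisodePayload
      (params) => spaceAssignmentQueue.add("assign-spaces", params),
      (params) => sessionCompactionQueue.add("compact-session", params),
      (params) => bertTopicQueue.add("run-topic-analysis", params),
    ),
  { connection },
);
```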
apps/webapp/app/jobs/session/session-compaction.logic.ts (Normal file, 455 lines)
@@ -0,0 +1,455 @@
import { logger } from "~/services/logger.service";
import type { CoreMessage } from "ai";
import { z } from "zod";
import { getEmbedding, makeModelCall } from "~/lib/model.server";
import {
  getCompactedSessionBySessionId,
  linkEpisodesToCompact,
  getSessionEpisodes,
  type CompactedSessionNode,
  type SessionEpisodeData,
  saveCompactedSession,
} from "~/services/graphModels/compactedSession";

export interface SessionCompactionPayload {
  userId: string;
  sessionId: string;
  source: string;
  triggerSource?: "auto" | "manual" | "threshold";
}

export interface SessionCompactionResult {
  success: boolean;
  compactionResult?: {
    compactUuid: string;
    sessionId: string;
    summary: string;
    episodeCount: number;
    startTime: Date;
    endTime: Date;
    confidence: number;
    compressionRatio: number;
  };
  reason?: string;
  episodeCount?: number;
  error?: string;
}

// Zod schema for LLM response validation
export const CompactionResultSchema = z.object({
  summary: z.string().describe("Consolidated narrative of the entire session"),
  confidence: z
    .number()
    .min(0)
    .max(1)
    .describe("Confidence score of the compaction quality"),
});

export const CONFIG = {
  minEpisodesForCompaction: 5, // Minimum episodes to trigger compaction
  compactionThreshold: 1, // Trigger after N new episodes
  maxEpisodesPerBatch: 50, // Process in batches if needed
};

/**
 * Core business logic for session compaction
 * This is shared between Trigger.dev and BullMQ implementations
 */
export async function processSessionCompaction(
  payload: SessionCompactionPayload,
): Promise<SessionCompactionResult> {
  const { userId, sessionId, source, triggerSource = "auto" } = payload;

  logger.info(`Starting session compaction`, {
    userId,
    sessionId,
    source,
    triggerSource,
  });

  try {
    // Check if compaction already exists
    const existingCompact = await getCompactedSessionBySessionId(
      sessionId,
      userId,
    );

    // Fetch all episodes for this session
    const episodes = await getSessionEpisodes(
      sessionId,
      userId,
      existingCompact?.endTime,
    );

    console.log("episodes", episodes.length);
    // Check if we have enough episodes
    if (!existingCompact && episodes.length < CONFIG.minEpisodesForCompaction) {
      logger.info(`Not enough episodes for compaction`, {
        sessionId,
        episodeCount: episodes.length,
        minRequired: CONFIG.minEpisodesForCompaction,
      });
      return {
        success: false,
        reason: "insufficient_episodes",
        episodeCount: episodes.length,
      };
    } else if (
      existingCompact &&
      episodes.length <
        CONFIG.minEpisodesForCompaction + CONFIG.compactionThreshold
    ) {
      logger.info(`Not enough new episodes for compaction`, {
        sessionId,
        episodeCount: episodes.length,
        minRequired:
          CONFIG.minEpisodesForCompaction + CONFIG.compactionThreshold,
      });
      return {
        success: false,
        reason: "insufficient_new_episodes",
        episodeCount: episodes.length,
      };
    }

    // Generate or update compaction
    const compactionResult = existingCompact
      ? await updateCompaction(existingCompact, episodes, userId)
      : await createCompaction(sessionId, episodes, userId, source);

    logger.info(`Session compaction completed`, {
      sessionId,
      compactUuid: compactionResult.uuid,
      episodeCount: compactionResult.episodeCount,
      compressionRatio: compactionResult.compressionRatio,
    });

    return {
      success: true,
      compactionResult: {
        compactUuid: compactionResult.uuid,
        sessionId: compactionResult.sessionId,
        summary: compactionResult.summary,
        episodeCount: compactionResult.episodeCount,
        startTime: compactionResult.startTime,
        endTime: compactionResult.endTime,
        confidence: compactionResult.confidence,
        compressionRatio: compactionResult.compressionRatio,
      },
    };
  } catch (error) {
    logger.error(`Session compaction failed`, {
      sessionId,
      userId,
      error: error instanceof Error ? error.message : String(error),
    });

    return {
      success: false,
      error: error instanceof Error ? error.message : String(error),
    };
  }
}

/**
 * Create new compaction
 */
async function createCompaction(
  sessionId: string,
  episodes: SessionEpisodeData[],
  userId: string,
  source: string,
): Promise<CompactedSessionNode> {
  logger.info(`Creating new compaction`, {
    sessionId,
    episodeCount: episodes.length,
  });

  // Generate compaction using LLM
  const compactionData = await generateCompaction(episodes, null);

  // Generate embedding for summary
  const summaryEmbedding = await getEmbedding(compactionData.summary);

  // Create CompactedSession node using graph model
  const compactUuid = crypto.randomUUID();
  const now = new Date();
  const startTime = new Date(episodes[0].createdAt);
  const endTime = new Date(episodes[episodes.length - 1].createdAt);
  const episodeUuids = episodes.map((e) => e.uuid);
  const compressionRatio = episodes.length / 1;

  const compactNode: CompactedSessionNode = {
    uuid: compactUuid,
    sessionId,
    summary: compactionData.summary,
    summaryEmbedding,
    episodeCount: episodes.length,
    startTime,
    endTime,
    createdAt: now,
    confidence: compactionData.confidence,
    userId,
    source,
    compressionRatio,
    metadata: { triggerType: "create" },
  };

  console.log("compactNode", compactNode);
  // Use graph model functions
  await saveCompactedSession(compactNode);
  await linkEpisodesToCompact(compactUuid, episodeUuids, userId);

  logger.info(`Compaction created`, {
    compactUuid,
    episodeCount: episodes.length,
  });

  return compactNode;
}

/**
 * Update existing compaction with new episodes
 */
async function updateCompaction(
  existingCompact: CompactedSessionNode,
  newEpisodes: SessionEpisodeData[],
  userId: string,
): Promise<CompactedSessionNode> {
  logger.info(`Updating existing compaction`, {
    compactUuid: existingCompact.uuid,
    newEpisodeCount: newEpisodes.length,
  });

  // Generate updated compaction using LLM (merging)
  const compactionData = await generateCompaction(
    newEpisodes,
    existingCompact.summary,
  );

  // Generate new embedding for updated summary
  const summaryEmbedding = await getEmbedding(compactionData.summary);

  // Update CompactedSession node using graph model
  const now = new Date();
  const endTime = newEpisodes[newEpisodes.length - 1].createdAt;
  const totalEpisodeCount = existingCompact.episodeCount + newEpisodes.length;
  const compressionRatio = totalEpisodeCount / 1;
  const episodeUuids = newEpisodes.map((e) => e.uuid);

  const updatedNode: CompactedSessionNode = {
    ...existingCompact,
    summary: compactionData.summary,
    summaryEmbedding,
    episodeCount: totalEpisodeCount,
    endTime,
    updatedAt: now,
    confidence: compactionData.confidence,
    compressionRatio,
    metadata: { triggerType: "update", newEpisodesAdded: newEpisodes.length },
  };

  // Use graph model functions
  await saveCompactedSession(updatedNode);
  await linkEpisodesToCompact(existingCompact.uuid, episodeUuids, userId);

  logger.info(`Compaction updated`, {
    compactUuid: existingCompact.uuid,
    totalEpisodeCount,
  });

  return updatedNode;
}

/**
 * Generate compaction using LLM (similar to Claude Code's compact approach)
 */
async function generateCompaction(
  episodes: SessionEpisodeData[],
  existingSummary: string | null,
): Promise<z.infer<typeof CompactionResultSchema>> {
  const systemPrompt = createCompactionSystemPrompt();
  const userPrompt = createCompactionUserPrompt(episodes, existingSummary);

  const messages: CoreMessage[] = [
    { role: "system", content: systemPrompt },
    { role: "user", content: userPrompt },
  ];

  logger.info(`Generating compaction with LLM`, {
    episodeCount: episodes.length,
    hasExistingSummary: !!existingSummary,
  });

  try {
    let responseText = "";
    await makeModelCall(
      false,
      messages,
      (text: string) => {
        responseText = text;
      },
      undefined,
      "high",
    );

    return parseCompactionResponse(responseText);
  } catch (error) {
    logger.error(`Failed to generate compaction`, {
      error: error instanceof Error ? error.message : String(error),
    });
    throw error;
  }
}

/**
 * System prompt for compaction (for agent recall/context retrieval)
 */
function createCompactionSystemPrompt(): string {
  return `You are a session compaction specialist. Your task is to create a rich, informative summary that will help AI agents understand what happened in this conversation session when they need context for future interactions.

## PURPOSE

This summary will be retrieved by AI agents when the user references this session in future conversations. The agent needs enough context to:
- Understand what was discussed and why
- Know what decisions were made and their rationale
- Grasp the outcome and current state
- Have relevant technical details to provide informed responses

## COMPACTION GOALS

1. **Comprehensive Context**: Capture all important information that might be referenced later
2. **Decision Documentation**: Clearly state what was decided, why, and what alternatives were considered
3. **Technical Details**: Include specific implementations, tools, configurations, and technical choices
4. **Outcome Clarity**: Make it clear what was accomplished and what the final state is
5. **Evolution Tracking**: Show how thinking or decisions evolved during the session

## COMPACTION RULES

1. **Be Information-Dense**: Pack useful details without fluff or repetition
2. **Structure Chronologically**: Start with problem/question, show progression, end with outcome
3. **Highlight Key Points**: Emphasize decisions, implementations, results, and learnings
4. **Include Specifics**: Names of libraries, specific configurations, metrics, numbers matter
5. **Resolve Contradictions**: Always use the most recent/final version when information conflicts

## OUTPUT REQUIREMENTS

- **summary**: A detailed, information-rich narrative that tells the complete story
- Structure naturally based on content - use as many paragraphs as needed
- Each distinct topic, decision, or phase should get its own paragraph(s)
- Start with context and initial problem/question
- Progress chronologically through discussions, decisions, and implementations
- **Final paragraph MUST**: State the outcome, results, and current state
- Don't artificially limit length - capture everything important

- **confidence**: Score (0-1) reflecting how well this summary captures the session's essence

Your response MUST be valid JSON wrapped in <output></output> tags.

## KEY PRINCIPLES

- Write for an AI agent that needs to help the user in future conversations
- Include technical specifics that might be referenced (library names, configurations, metrics)
- Make outcomes and current state crystal clear in the final paragraph
- Show the reasoning behind decisions, not just the decisions themselves
- Be comprehensive but concise - every sentence should add value
- Each major topic or phase deserves its own paragraph(s)
- Don't compress too much - agents need the details
`;
}

/**
 * User prompt for compaction
 */
function createCompactionUserPrompt(
  episodes: SessionEpisodeData[],
  existingSummary: string | null,
): string {
  let prompt = "";

  if (existingSummary) {
    prompt += `## EXISTING SUMMARY (from previous compaction)\n\n${existingSummary}\n\n`;
    prompt += `## NEW EPISODES (to merge into existing summary)\n\n`;
  } else {
    prompt += `## SESSION EPISODES (to compact)\n\n`;
  }

  episodes.forEach((episode, index) => {
    const timestamp = new Date(episode.validAt).toISOString();
    prompt += `### Episode ${index + 1} (${timestamp})\n`;
    prompt += `Source: ${episode.source}\n`;
    prompt += `Content:\n${episode.originalContent}\n\n`;
  });

  if (existingSummary) {
    prompt += `\n## INSTRUCTIONS\n\n`;
    prompt += `Merge the new episodes into the existing summary. Update facts, add new information, and maintain narrative coherence. Ensure the consolidated summary reflects the complete session including both old and new content.\n`;
  } else {
    prompt += `\n## INSTRUCTIONS\n\n`;
    prompt += `Create a compact summary of this entire session. Consolidate all information into a coherent narrative with deduplicated key facts.\n`;
  }

  return prompt;
}

/**
 * Parse LLM response for compaction
 */
function parseCompactionResponse(
  response: string,
): z.infer<typeof CompactionResultSchema> {
  try {
    // Extract content from <output> tags
    const outputMatch = response.match(/<output>([\s\S]*?)<\/output>/);
    if (!outputMatch) {
      logger.warn("No <output> tags found in LLM compaction response");
      logger.debug("Full LLM response:", { response });
      throw new Error("Invalid LLM response format - missing <output> tags");
    }

    let jsonContent = outputMatch[1].trim();

    // Remove markdown code blocks if present
    jsonContent = jsonContent.replace(/```json\n?/g, "").replace(/```\n?/g, "");

    const parsed = JSON.parse(jsonContent);

    // Validate with schema
    const validated = CompactionResultSchema.parse(parsed);

    return validated;
  } catch (error) {
    logger.error("Failed to parse compaction response", {
      error: error instanceof Error ? error.message : String(error),
      response: response.substring(0, 500),
    });
    throw new Error(`Failed to parse compaction response: ${error}`);
  }
}

/**
 * Helper function to check if compaction should be triggered
 */
export async function shouldTriggerCompaction(
  sessionId: string,
  userId: string,
): Promise<boolean> {
  const existingCompact = await getCompactedSessionBySessionId(
    sessionId,
    userId,
  );

  if (!existingCompact) {
    // Check if we have enough episodes for initial compaction
    const episodes = await getSessionEpisodes(sessionId, userId);
    return episodes.length >= CONFIG.minEpisodesForCompaction;
  }

  // Check if we have enough new episodes to update
  const newEpisodes = await getSessionEpisodes(
    sessionId,
    userId,
    existingCompact.endTime,
  );
  return newEpisodes.length >= CONFIG.compactionThreshold;
}
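As a usage sketch (not part of this diff), the exported `shouldTriggerCompaction` helper can act as a cheap gate in front of `processSessionCompaction`, so the LLM-backed compaction only runs once the thresholds in `CONFIG` are met for a session. The wrapper name below is hypothetical.

```ts
// Hedged usage sketch; maybeCompactSession is an illustrative name, not from this diff.
import {
  processSessionCompaction,
  shouldTriggerCompaction,
} from "./session-compaction.logic";

export async function maybeCompactSession(
  userId: string,
  sessionId: string,
  source: string,
) {
  // Cheap graph-count check before paying for an LLM compaction call.
  if (!(await shouldTriggerCompaction(sessionId, userId))) {
    return { success: false, reason: "threshold_not_met" };
  }

  return processSessionCompaction({
    userId,
    sessionId,
    source,
    triggerSource: "threshold",
  });
}
```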
apps/webapp/app/jobs/spaces/space-assignment.logic.ts (Normal file, 1201 lines)
File diff suppressed because it is too large.
apps/webapp/app/jobs/spaces/space-identification.logic.ts (Normal file, 229 lines)
@@ -0,0 +1,229 @@
/**
 * Space Identification Logic
 *
 * Uses LLM to identify appropriate spaces for topics discovered by BERT analysis
 */

import { makeModelCall } from "~/lib/model.server";
import { getAllSpacesForUser } from "~/services/graphModels/space";
import { getEpisode } from "~/services/graphModels/episode";
import { logger } from "~/services/logger.service";
import type { SpaceNode } from "@core/types";

export interface TopicData {
  keywords: string[];
  episodeIds: string[];
}

export interface SpaceProposal {
  name: string;
  intent: string;
  confidence: number;
  reason: string;
  topics: string[]; // Array of topic IDs
}

interface IdentifySpacesParams {
  userId: string;
  topics: Record<string, TopicData>;
}

/**
 * Identify spaces for topics using LLM analysis
 * Takes top 10 keywords and top 5 episodes per topic
 */
export async function identifySpacesForTopics(
  params: IdentifySpacesParams,
): Promise<SpaceProposal[]> {
  const { userId, topics } = params;

  // Get existing spaces for the user
  const existingSpaces = await getAllSpacesForUser(userId);

  // Prepare topic data with top 10 keywords and top 5 episodes
  const topicsForAnalysis = await Promise.all(
    Object.entries(topics).map(async ([topicId, topicData]) => {
      // Take top 10 keywords
      const topKeywords = topicData.keywords.slice(0, 10);

      // Take top 5 episodes and fetch their content
      const topEpisodeIds = topicData.episodeIds.slice(0, 5);
      const episodes = await Promise.all(
        topEpisodeIds.map((id) => getEpisode(id)),
      );

      return {
        topicId,
        keywords: topKeywords,
        episodes: episodes
          .filter((e) => e !== null)
          .map((e) => ({
            content: e!.content.substring(0, 500), // Limit to 500 chars per episode
          })),
        episodeCount: topicData.episodeIds.length,
      };
    }),
  );

  // Build the prompt
  const prompt = buildSpaceIdentificationPrompt(
    existingSpaces,
    topicsForAnalysis,
  );

  logger.info("Identifying spaces for topics", {
    userId,
    topicCount: Object.keys(topics).length,
    existingSpaceCount: existingSpaces.length,
  });

  // Call LLM with structured output
  let responseText = "";
  await makeModelCall(
    false, // not streaming
    [{ role: "user", content: prompt }],
    (text) => {
      responseText = text;
    },
    {
      temperature: 0.7,
    },
    "high", // Use high complexity for space identification
  );

  // Parse the response
  const proposals = parseSpaceProposals(responseText);

  logger.info("Space identification completed", {
    userId,
    proposalCount: proposals.length,
  });

  return proposals;
}

/**
 * Build the prompt for space identification
 */
function buildSpaceIdentificationPrompt(
  existingSpaces: SpaceNode[],
  topics: Array<{
    topicId: string;
    keywords: string[];
    episodes: Array<{ content: string }>;
    episodeCount: number;
  }>,
): string {
  const existingSpacesSection =
    existingSpaces.length > 0
      ? `## Existing Spaces

The user currently has these spaces:
${existingSpaces.map((s) => `- **${s.name}**: ${s.description || "No description"} (${s.contextCount || 0} episodes)`).join("\n")}

When identifying new spaces, consider if topics fit into existing spaces or if new spaces are needed.`
      : `## Existing Spaces

The user currently has no spaces defined. This is a fresh start for space organization.`;

  const topicsSection = `## Topics Discovered

BERT topic modeling has identified ${topics.length} distinct topics from the user's episodes. Each topic represents a cluster of semantically related content.

${topics
  .map(
    (t, idx) => `### Topic ${idx + 1} (ID: ${t.topicId})
**Episode Count**: ${t.episodeCount}
**Top Keywords**: ${t.keywords.join(", ")}

**Sample Episodes** (showing ${t.episodes.length} of ${t.episodeCount}):
${t.episodes.map((e, i) => `${i + 1}. ${e.content}`).join("\n")}
`,
  )
  .join("\n")}`;

  return `You are a knowledge organization expert. Your task is to analyze discovered topics and identify appropriate "spaces" (thematic containers) for organizing episodic memories.

${existingSpacesSection}

${topicsSection}

## Task

Analyze the topics above and identify spaces that would help organize this content meaningfully. For each space:

1. **Consider existing spaces first**: If topics clearly belong to existing spaces, assign them there
2. **Create new spaces when needed**: If topics represent distinct themes not covered by existing spaces
3. **Group related topics**: Multiple topics can be assigned to the same space if they share a theme
4. **Aim for 20-50 episodes per space**: This is the sweet spot for space cohesion
5. **Focus on user intent**: What would help the user find and understand this content later?

## Output Format

Return your analysis as a JSON array of space proposals. Each proposal should have:

\`\`\`json
[
  {
    "name": "Space name (use existing space name if assigning to existing space)",
    "intent": "Clear description of what this space represents",
    "confidence": 0.85,
    "reason": "Brief explanation of why these topics belong together",
    "topics": ["topic-id-1", "topic-id-2"]
  }
]
\`\`\`

**Important Guidelines**:
- **confidence**: 0.0-1.0 scale indicating how confident you are this is a good grouping
- **topics**: Array of topic IDs (use the exact IDs from above like "0", "1", "-1", etc.)
- **name**: For existing spaces, use the EXACT name. For new spaces, create a clear, concise name
- Only propose spaces with confidence >= 0.6
- Each topic should only appear in ONE space proposal
- Topic "-1" is the outlier topic (noise) - only include if it genuinely fits a theme

Return ONLY the JSON array, no additional text.`;
}

/**
 * Parse space proposals from LLM response
 */
function parseSpaceProposals(responseText: string): SpaceProposal[] {
  try {
    // Extract JSON from markdown code blocks if present
    const jsonMatch = responseText.match(/```(?:json)?\s*(\[[\s\S]*?\])\s*```/);
    const jsonText = jsonMatch ? jsonMatch[1] : responseText;

    const proposals = JSON.parse(jsonText.trim());

    if (!Array.isArray(proposals)) {
      throw new Error("Response is not an array");
    }

    // Validate and filter proposals
    return proposals
      .filter((p) => {
        return (
          p.name &&
          p.intent &&
          typeof p.confidence === "number" &&
          p.confidence >= 0.6 &&
          Array.isArray(p.topics) &&
          p.topics.length > 0
        );
      })
      .map((p) => ({
        name: p.name.trim(),
        intent: p.intent.trim(),
        confidence: p.confidence,
        reason: (p.reason || "").trim(),
        topics: p.topics.map((t: any) => String(t)),
      }));
  } catch (error) {
    logger.error("Failed to parse space proposals", {
      error,
      responseText: responseText.substring(0, 500),
    });
    return [];
  }
}
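A brief usage sketch of `identifySpacesForTopics`, assuming hypothetical topic output from the BERT analysis step; the user ID, topic IDs, keywords, and episode UUIDs below are placeholders, not real data.

```ts
// Illustrative call only; all IDs and keywords are placeholders.
import {
  identifySpacesForTopics,
  type TopicData,
} from "./space-identification.logic";

async function proposeSpacesExample() {
  const topics: Record<string, TopicData> = {
    "0": {
      keywords: ["react", "hooks", "useEffect", "state", "rendering"],
      episodeIds: ["ep-1", "ep-2", "ep-3"],
    },
    "1": {
      keywords: ["sleep", "running", "heart rate", "recovery"],
      episodeIds: ["ep-4", "ep-5"],
    },
  };

  const proposals = await identifySpacesForTopics({ userId: "user-123", topics });
  // Each proposal carries name, intent, reason, the grouped topic IDs, and a
  // confidence score; proposals under 0.6 confidence are filtered out upstream.
  return proposals;
}
```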
apps/webapp/app/jobs/spaces/space-summary.logic.ts (Normal file, 721 lines)
@@ -0,0 +1,721 @@
import { logger } from "~/services/logger.service";
import { SpaceService } from "~/services/space.server";
import { makeModelCall } from "~/lib/model.server";
import { runQuery } from "~/lib/neo4j.server";
import { updateSpaceStatus, SPACE_STATUS } from "~/trigger/utils/space-status";
import type { CoreMessage } from "ai";
import { z } from "zod";
import { getSpace, updateSpace } from "~/trigger/utils/space-utils";
import { getSpaceEpisodeCount } from "~/services/graphModels/space";

export interface SpaceSummaryPayload {
  userId: string;
  spaceId: string; // Single space only
  triggerSource?: "assignment" | "manual" | "scheduled";
}

interface SpaceEpisodeData {
  uuid: string;
  content: string;
  originalContent: string;
  source: string;
  createdAt: Date;
  validAt: Date;
  metadata: any;
  sessionId: string | null;
}

interface SpaceSummaryData {
  spaceId: string;
  spaceName: string;
  spaceDescription?: string;
  contextCount: number;
  summary: string;
  keyEntities: string[];
  themes: string[];
  confidence: number;
  lastUpdated: Date;
  isIncremental: boolean;
}

// Zod schema for LLM response validation
const SummaryResultSchema = z.object({
  summary: z.string(),
  keyEntities: z.array(z.string()),
  themes: z.array(z.string()),
  confidence: z.number().min(0).max(1),
});

const CONFIG = {
  maxEpisodesForSummary: 20, // Limit episodes for performance
  minEpisodesForSummary: 1, // Minimum episodes to generate summary
  summaryEpisodeThreshold: 5, // Minimum new episodes required to trigger summary (configurable)
};

export interface SpaceSummaryResult {
  success: boolean;
  spaceId: string;
  triggerSource: string;
  summary?: {
    statementCount: number;
    confidence: number;
    themesCount: number;
  } | null;
  reason?: string;
}

/**
 * Core business logic for space summary generation
 * This is shared between Trigger.dev and BullMQ implementations
 */
export async function processSpaceSummary(
  payload: SpaceSummaryPayload,
): Promise<SpaceSummaryResult> {
  const { userId, spaceId, triggerSource = "manual" } = payload;

  logger.info(`Starting space summary generation`, {
    userId,
    spaceId,
    triggerSource,
  });

  try {
    // Update status to processing
    await updateSpaceStatus(spaceId, SPACE_STATUS.PROCESSING, {
      userId,
      operation: "space-summary",
      metadata: { triggerSource, phase: "start_summary" },
    });

    // Generate summary for the single space
    const summaryResult = await generateSpaceSummary(
      spaceId,
      userId,
      triggerSource,
    );

    if (summaryResult) {
      // Store the summary
      await storeSummary(summaryResult);

      // Update status to ready after successful completion
      await updateSpaceStatus(spaceId, SPACE_STATUS.READY, {
        userId,
        operation: "space-summary",
        metadata: {
          triggerSource,
          phase: "completed_summary",
          contextCount: summaryResult.contextCount,
          confidence: summaryResult.confidence,
        },
      });

      logger.info(`Generated summary for space ${spaceId}`, {
        statementCount: summaryResult.contextCount,
        confidence: summaryResult.confidence,
        themes: summaryResult.themes.length,
        triggerSource,
      });

      return {
        success: true,
        spaceId,
        triggerSource,
        summary: {
          statementCount: summaryResult.contextCount,
          confidence: summaryResult.confidence,
          themesCount: summaryResult.themes.length,
        },
      };
    } else {
      // No summary generated - this could be due to insufficient episodes or no new episodes
      // This is not an error state, so update status to ready
      await updateSpaceStatus(spaceId, SPACE_STATUS.READY, {
        userId,
        operation: "space-summary",
        metadata: {
          triggerSource,
          phase: "no_summary_needed",
          reason: "Insufficient episodes or no new episodes to summarize",
        },
      });

      logger.info(
        `No summary generated for space ${spaceId} - insufficient or no new episodes`,
      );
      return {
        success: true,
        spaceId,
        triggerSource,
        summary: null,
        reason: "No episodes to summarize",
      };
    }
  } catch (error) {
    // Update status to error on exception
    try {
      await updateSpaceStatus(spaceId, SPACE_STATUS.ERROR, {
        userId,
        operation: "space-summary",
        metadata: {
          triggerSource,
          phase: "exception",
          error: error instanceof Error ? error.message : "Unknown error",
        },
      });
    } catch (statusError) {
      logger.warn(`Failed to update status to error for space ${spaceId}`, {
        statusError,
      });
    }

    logger.error(
      `Error in space summary generation for space ${spaceId}:`,
      error as Record<string, unknown>,
    );
    throw error;
  }
}

async function generateSpaceSummary(
  spaceId: string,
  userId: string,
  triggerSource?: "assignment" | "manual" | "scheduled",
): Promise<SpaceSummaryData | null> {
  try {
    // 1. Get space details
    const spaceService = new SpaceService();
    const space = await spaceService.getSpace(spaceId, userId);

    if (!space) {
      logger.warn(`Space ${spaceId} not found for user ${userId}`);
      return null;
    }

    // 2. Check episode count threshold (skip for manual triggers)
    if (triggerSource !== "manual") {
      const currentEpisodeCount = await getSpaceEpisodeCount(spaceId, userId);
      const lastSummaryEpisodeCount = space.contextCount || 0;
      const episodeDifference = currentEpisodeCount - lastSummaryEpisodeCount;

      if (
        episodeDifference < CONFIG.summaryEpisodeThreshold ||
        lastSummaryEpisodeCount !== 0
      ) {
        logger.info(
          `Skipping summary generation for space ${spaceId}: only ${episodeDifference} new episodes (threshold: ${CONFIG.summaryEpisodeThreshold})`,
          {
            currentEpisodeCount,
            lastSummaryEpisodeCount,
            episodeDifference,
            threshold: CONFIG.summaryEpisodeThreshold,
          },
        );
        return null;
      }

      logger.info(
        `Proceeding with summary generation for space ${spaceId}: ${episodeDifference} new episodes (threshold: ${CONFIG.summaryEpisodeThreshold})`,
        {
          currentEpisodeCount,
          lastSummaryEpisodeCount,
          episodeDifference,
        },
      );
    }

    // 2. Check for existing summary
    const existingSummary = await getExistingSummary(spaceId);
    const isIncremental = existingSummary !== null;

    // 3. Get episodes (all or new ones based on existing summary)
    const episodes = await getSpaceEpisodes(
      spaceId,
      userId,
      isIncremental ? existingSummary?.lastUpdated : undefined,
    );

    // Handle case where no new episodes exist for incremental update
    if (isIncremental && episodes.length === 0) {
      logger.info(
        `No new episodes found for space ${spaceId}, skipping summary update`,
      );
      return null;
    }

    // Check minimum episode requirement for new summaries only
    if (!isIncremental && episodes.length < CONFIG.minEpisodesForSummary) {
      logger.info(
        `Space ${spaceId} has insufficient episodes (${episodes.length}) for new summary`,
      );
      return null;
    }

    // 4. Process episodes using unified approach
    let summaryResult;

    if (episodes.length > CONFIG.maxEpisodesForSummary) {
      logger.info(
        `Large space detected (${episodes.length} episodes). Processing in batches.`,
      );

      // Process in batches, each building on previous result
      const batches: SpaceEpisodeData[][] = [];
      for (let i = 0; i < episodes.length; i += CONFIG.maxEpisodesForSummary) {
        batches.push(episodes.slice(i, i + CONFIG.maxEpisodesForSummary));
      }

      let currentSummary = existingSummary?.summary || null;
      let currentThemes = existingSummary?.themes || [];
      let cumulativeConfidence = 0;

      for (const [batchIndex, batch] of batches.entries()) {
        logger.info(
          `Processing batch ${batchIndex + 1}/${batches.length} with ${batch.length} episodes`,
        );

        const batchResult = await generateUnifiedSummary(
          space.name,
          space.description as string,
          batch,
          currentSummary,
          currentThemes,
        );

        if (batchResult) {
          currentSummary = batchResult.summary;
          currentThemes = batchResult.themes;
          cumulativeConfidence += batchResult.confidence;
        } else {
          logger.warn(`Failed to process batch ${batchIndex + 1}`);
        }

        // Small delay between batches
        if (batchIndex < batches.length - 1) {
          await new Promise((resolve) => setTimeout(resolve, 500));
        }
      }

      summaryResult = currentSummary
        ? {
            summary: currentSummary,
            themes: currentThemes,
            confidence: Math.min(cumulativeConfidence / batches.length, 1.0),
          }
        : null;
    } else {
      logger.info(
        `Processing ${episodes.length} episodes with unified approach`,
      );

      // Use unified approach for smaller spaces
      summaryResult = await generateUnifiedSummary(
        space.name,
        space.description as string,
        episodes,
        existingSummary?.summary || null,
        existingSummary?.themes || [],
      );
    }

    if (!summaryResult) {
      logger.warn(`Failed to generate LLM summary for space ${spaceId}`);
      return null;
    }

    // Get the actual current counts from Neo4j
    const currentEpisodeCount = await getSpaceEpisodeCount(spaceId, userId);

    return {
      spaceId: space.uuid,
      spaceName: space.name,
      spaceDescription: space.description as string,
      contextCount: currentEpisodeCount,
      summary: summaryResult.summary,
      keyEntities: summaryResult.keyEntities || [],
      themes: summaryResult.themes,
      confidence: summaryResult.confidence,
      lastUpdated: new Date(),
      isIncremental,
    };
  } catch (error) {
    logger.error(
      `Error generating summary for space ${spaceId}:`,
      error as Record<string, unknown>,
    );
    return null;
  }
}

async function generateUnifiedSummary(
  spaceName: string,
  spaceDescription: string | undefined,
  episodes: SpaceEpisodeData[],
  previousSummary: string | null = null,
  previousThemes: string[] = [],
): Promise<{
  summary: string;
  themes: string[];
  confidence: number;
  keyEntities?: string[];
} | null> {
  try {
    const prompt = createUnifiedSummaryPrompt(
      spaceName,
      spaceDescription,
      episodes,
      previousSummary,
      previousThemes,
    );

    // Space summary generation requires HIGH complexity (creative synthesis, narrative generation)
    let responseText = "";
    await makeModelCall(
      false,
      prompt,
      (text: string) => {
        responseText = text;
      },
      undefined,
      "high",
    );

    return parseSummaryResponse(responseText);
  } catch (error) {
    logger.error(
      "Error generating unified summary:",
      error as Record<string, unknown>,
    );
    return null;
  }
}

function createUnifiedSummaryPrompt(
  spaceName: string,
  spaceDescription: string | undefined,
  episodes: SpaceEpisodeData[],
  previousSummary: string | null,
  previousThemes: string[],
): CoreMessage[] {
  // If there are no episodes and no previous summary, we cannot generate a meaningful summary
  if (episodes.length === 0 && previousSummary === null) {
    throw new Error(
      "Cannot generate summary without episodes or existing summary",
    );
  }

  const episodesText = episodes
    .map(
      (episode) =>
        `- ${episode.content} (Source: ${episode.source}, Session: ${episode.sessionId || "N/A"})`,
    )
    .join("\n");

  // Extract key entities and themes from episode content
  const contentWords = episodes
    .map((ep) => ep.content.toLowerCase())
    .join(" ")
    .split(/\s+/)
    .filter((word) => word.length > 3);

  const wordFrequency = new Map<string, number>();
  contentWords.forEach((word) => {
    wordFrequency.set(word, (wordFrequency.get(word) || 0) + 1);
  });

  const topEntities = Array.from(wordFrequency.entries())
    .sort(([, a], [, b]) => b - a)
    .slice(0, 10)
    .map(([word]) => word);

  const isUpdate = previousSummary !== null;

  return [
    {
      role: "system",
      content: `You are an expert at analyzing and summarizing episodes within semantic spaces based on the space's intent and purpose. Your task is to ${isUpdate ? "update an existing summary by integrating new episodes" : "create a comprehensive summary of episodes"}.

CRITICAL RULES:
1. Base your summary ONLY on insights derived from the actual content/episodes provided
2. Use the space's INTENT/PURPOSE (from description) to guide what to summarize and how to organize it
3. Write in a factual, neutral tone - avoid promotional language ("pivotal", "invaluable", "cutting-edge")
4. Be specific and concrete - reference actual content, patterns, and insights found in the episodes
5. If episodes are insufficient for meaningful insights, state that more data is needed

INTENT-DRIVEN SUMMARIZATION:
Your summary should SERVE the space's intended purpose. Examples:
- "Learning React" → Summarize React concepts, patterns, techniques learned
- "Project X Updates" → Summarize progress, decisions, blockers, next steps
- "Health Tracking" → Summarize metrics, trends, observations, insights
- "Guidelines for React" → Extract actionable patterns, best practices, rules
- "Evolution of design thinking" → Track how thinking changed over time, decision points
The intent defines WHY this space exists - organize content to serve that purpose.

INSTRUCTIONS:
${
  isUpdate
    ? `1. Review the existing summary and themes carefully
2. Analyze the new episodes for patterns and insights that align with the space's intent
3. Identify connecting points between existing knowledge and new episodes
4. Update the summary to seamlessly integrate new information while preserving valuable existing insights
5. Evolve themes by adding new ones or refining existing ones based on the space's purpose
6. Organize the summary to serve the space's intended use case`
    : `1. Analyze the semantic content and relationships within the episodes
2. Identify topics/sections that align with the space's INTENT and PURPOSE
3. Create a coherent summary that serves the space's intended use case
4. Organize the summary based on the space's purpose (not generic frequency-based themes)`
}
${isUpdate ? "7" : "5"}. Assess your confidence in the ${isUpdate ? "updated" : ""} summary quality (0.0-1.0)

INTENT-ALIGNED ORGANIZATION:
- Organize sections based on what serves the space's purpose
- Topics don't need minimum episode counts - relevance to intent matters most
- Each section should provide value aligned with the space's intended use
- For "guidelines" spaces: focus on actionable patterns
- For "tracking" spaces: focus on temporal patterns and changes
- For "learning" spaces: focus on concepts and insights gained
- Let the space's intent drive the structure, not rigid rules

${
  isUpdate
    ? `CONNECTION FOCUS:
- Entity relationships that span across batches/time
- Theme evolution and expansion
- Temporal patterns and progressions
- Contradictions or confirmations of existing insights
- New insights that complement existing knowledge`
    : ""
}

RESPONSE FORMAT:
Provide your response inside <output></output> tags with valid JSON. Include both HTML summary and markdown format.

<output>
{
"summary": "${isUpdate ? "Updated HTML summary that integrates new insights with existing knowledge. Write factually about what the statements reveal - mention specific entities, relationships, and patterns found in the data. Avoid marketing language. Use HTML tags for structure." : "Factual HTML summary based on patterns found in the statements. Report what the data actually shows - specific entities, relationships, frequencies, and concrete insights. Avoid promotional language. Use HTML tags like <p>, <strong>, <ul>, <li> for structure. Keep it concise and evidence-based."}",
"keyEntities": ["entity1", "entity2", "entity3"],
"themes": ["${isUpdate ? 'updated_theme1", "new_theme2", "evolved_theme3' : 'theme1", "theme2", "theme3'}"],
"confidence": 0.85
}
</output>

JSON FORMATTING RULES:
- HTML content in summary field is allowed and encouraged
- Escape quotes within strings as \"
- Escape HTML angle brackets if needed: < and >
- Use proper HTML tags for structure: <p>, <strong>, <em>, <ul>, <li>, <h3>, etc.
- HTML content should be well-formed and semantic

GUIDELINES:
${
  isUpdate
    ? `- Preserve valuable insights from existing summary
- Integrate new information by highlighting connections
- Themes should evolve naturally, don't replace wholesale
- The updated summary should read as a coherent whole
- Make the summary user-friendly and explain what value this space provides`
    : `- Report only what the episodes actually reveal - be specific and concrete
- Cite actual content and patterns found in the episodes
- Avoid generic descriptions that could apply to any space
- Use neutral, factual language - no "comprehensive", "robust", "cutting-edge" etc.
- Themes must be backed by at least 3 supporting episodes with clear evidence
- Better to have fewer, well-supported themes than many weak ones
- Confidence should reflect actual data quality and coverage, not aspirational goals`
}`,
    },
    {
      role: "user",
      content: `SPACE INFORMATION:
Name: "${spaceName}"
Intent/Purpose: ${spaceDescription || "No specific intent provided - organize naturally based on content"}

${
  isUpdate
    ? `EXISTING SUMMARY:
${previousSummary}

EXISTING THEMES:
${previousThemes.join(", ")}

NEW EPISODES TO INTEGRATE (${episodes.length} episodes):`
    : `EPISODES IN THIS SPACE (${episodes.length} episodes):`
}
${episodesText}

${
  episodes.length > 0
    ? `TOP WORDS BY FREQUENCY:
${topEntities.join(", ")}`
    : ""
}

${
  isUpdate
    ? "Please identify connections between the existing summary and new episodes, then update the summary to integrate the new insights coherently. Organize the summary to SERVE the space's intent/purpose. Remember: only summarize insights from the actual episode content."
    : "Please analyze the episodes and provide a comprehensive summary that SERVES the space's intent/purpose. Organize sections based on what would be most valuable for this space's intended use case. If the intent is unclear, organize naturally based on content patterns. Only summarize insights from actual episode content."
}`,
    },
  ];
}

async function getExistingSummary(spaceId: string): Promise<{
  summary: string;
  themes: string[];
  lastUpdated: Date;
  contextCount: number;
} | null> {
  try {
    const existingSummary = await getSpace(spaceId);

    if (existingSummary?.summary) {
      return {
        summary: existingSummary.summary,
|
||||||
|
themes: existingSummary.themes,
|
||||||
|
lastUpdated: existingSummary.summaryGeneratedAt || new Date(),
|
||||||
|
contextCount: existingSummary.contextCount || 0,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`Failed to get existing summary for space ${spaceId}:`, {
|
||||||
|
error,
|
||||||
|
});
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getSpaceEpisodes(
|
||||||
|
spaceId: string,
|
||||||
|
userId: string,
|
||||||
|
sinceDate?: Date,
|
||||||
|
): Promise<SpaceEpisodeData[]> {
|
||||||
|
// Query episodes directly using Space-[:HAS_EPISODE]->Episode relationships
|
||||||
|
const params: any = { spaceId, userId };
|
||||||
|
|
||||||
|
let dateCondition = "";
|
||||||
|
if (sinceDate) {
|
||||||
|
dateCondition = "AND e.createdAt > $sinceDate";
|
||||||
|
params.sinceDate = sinceDate.toISOString();
|
||||||
|
}
|
||||||
|
|
||||||
|
const query = `
|
||||||
|
MATCH (space:Space {uuid: $spaceId, userId: $userId})-[:HAS_EPISODE]->(e:Episode {userId: $userId})
|
||||||
|
WHERE e IS NOT NULL ${dateCondition}
|
||||||
|
RETURN DISTINCT e
|
||||||
|
ORDER BY e.createdAt DESC
|
||||||
|
`;
|
||||||
|
|
||||||
|
const result = await runQuery(query, params);
|
||||||
|
|
||||||
|
return result.map((record) => {
|
||||||
|
const episode = record.get("e").properties;
|
||||||
|
return {
|
||||||
|
uuid: episode.uuid,
|
||||||
|
content: episode.content,
|
||||||
|
originalContent: episode.originalContent,
|
||||||
|
source: episode.source,
|
||||||
|
createdAt: new Date(episode.createdAt),
|
||||||
|
validAt: new Date(episode.validAt),
|
||||||
|
metadata: JSON.parse(episode.metadata || "{}"),
|
||||||
|
sessionId: episode.sessionId,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseSummaryResponse(response: string): {
|
||||||
|
summary: string;
|
||||||
|
themes: string[];
|
||||||
|
confidence: number;
|
||||||
|
keyEntities?: string[];
|
||||||
|
} | null {
|
||||||
|
try {
|
||||||
|
// Extract content from <output> tags
|
||||||
|
const outputMatch = response.match(/<output>([\s\S]*?)<\/output>/);
|
||||||
|
if (!outputMatch) {
|
||||||
|
logger.warn("No <output> tags found in LLM summary response");
|
||||||
|
logger.debug("Full LLM response:", { response });
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
let jsonContent = outputMatch[1].trim();
|
||||||
|
|
||||||
|
let parsed;
|
||||||
|
try {
|
||||||
|
parsed = JSON.parse(jsonContent);
|
||||||
|
} catch (jsonError) {
|
||||||
|
logger.warn("JSON parsing failed, attempting cleanup and retry", {
|
||||||
|
originalError: jsonError,
|
||||||
|
jsonContent: jsonContent.substring(0, 500) + "...", // Log first 500 chars
|
||||||
|
});
|
||||||
|
|
||||||
|
// More aggressive cleanup for malformed JSON
|
||||||
|
jsonContent = jsonContent
|
||||||
|
.replace(/([^\\])"/g, '$1\\"') // Escape unescaped quotes
|
||||||
|
.replace(/^"/g, '\\"') // Escape quotes at start
|
||||||
|
.replace(/\\\\"/g, '\\"'); // Fix double-escaped quotes
|
||||||
|
|
||||||
|
parsed = JSON.parse(jsonContent);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate the response structure
|
||||||
|
const validationResult = SummaryResultSchema.safeParse(parsed);
|
||||||
|
if (!validationResult.success) {
|
||||||
|
logger.warn("Invalid LLM summary response format:", {
|
||||||
|
error: validationResult.error,
|
||||||
|
parsedData: parsed,
|
||||||
|
});
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return validationResult.data;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(
|
||||||
|
"Error parsing LLM summary response:",
|
||||||
|
error as Record<string, unknown>,
|
||||||
|
);
|
||||||
|
logger.debug("Failed response content:", { response });
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function storeSummary(summaryData: SpaceSummaryData): Promise<void> {
|
||||||
|
try {
|
||||||
|
// Store in PostgreSQL for API access and persistence
|
||||||
|
await updateSpace(summaryData);
|
||||||
|
|
||||||
|
// Also store in Neo4j for graph-based queries
|
||||||
|
const query = `
|
||||||
|
MATCH (space:Space {uuid: $spaceId})
|
||||||
|
SET space.summary = $summary,
|
||||||
|
space.keyEntities = $keyEntities,
|
||||||
|
space.themes = $themes,
|
||||||
|
space.summaryConfidence = $confidence,
|
||||||
|
space.summaryContextCount = $contextCount,
|
||||||
|
space.summaryLastUpdated = datetime($lastUpdated)
|
||||||
|
RETURN space
|
||||||
|
`;
|
||||||
|
|
||||||
|
await runQuery(query, {
|
||||||
|
spaceId: summaryData.spaceId,
|
||||||
|
summary: summaryData.summary,
|
||||||
|
keyEntities: summaryData.keyEntities,
|
||||||
|
themes: summaryData.themes,
|
||||||
|
confidence: summaryData.confidence,
|
||||||
|
contextCount: summaryData.contextCount,
|
||||||
|
lastUpdated: summaryData.lastUpdated.toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info(`Stored summary for space ${summaryData.spaceId}`, {
|
||||||
|
themes: summaryData.themes.length,
|
||||||
|
keyEntities: summaryData.keyEntities.length,
|
||||||
|
confidence: summaryData.confidence,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(
|
||||||
|
`Error storing summary for space ${summaryData.spaceId}:`,
|
||||||
|
error as Record<string, unknown>,
|
||||||
|
);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
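For orientation, the helpers above chain together roughly as follows inside the space-summary job. This is an illustrative sketch, not code from the repository: the wrapper function and the rawLlmText variable are hypothetical, the argument shape is inferred from how storeSummary uses its parameter (the real SpaceSummaryData type may carry more fields), and only parseSummaryResponse and storeSummary are the helpers defined above.

// Hypothetical glue code: parse the LLM output, then persist it for the space.
async function persistSummaryFromLlm(
  spaceId: string,
  rawLlmText: string, // whatever the model returned for the summary prompt
  episodeCount: number,
) {
  const parsed = parseSummaryResponse(rawLlmText);
  if (!parsed) {
    // Missing <output> tags or invalid JSON - nothing to store.
    return;
  }

  await storeSummary({
    spaceId,
    summary: parsed.summary,
    keyEntities: parsed.keyEntities ?? [],
    themes: parsed.themes,
    confidence: parsed.confidence,
    contextCount: episodeCount,
    lastUpdated: new Date(),
  });
}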
@@ -4,13 +4,18 @@ import { EpisodeType } from "@core/types";
 import { type z } from "zod";
 import { prisma } from "~/db.server";
 import { hasCredits } from "~/services/billing.server";
-import { type IngestBodyRequest, ingestTask } from "~/trigger/ingest/ingest";
-import { ingestDocumentTask } from "~/trigger/ingest/ingest-document";
+import { type IngestBodyRequest } from "~/trigger/ingest/ingest";
+import {
+  enqueueIngestDocument,
+  enqueueIngestEpisode,
+} from "~/lib/queue-adapter.server";
+import { trackFeatureUsage } from "~/services/telemetry.server";

 export const addToQueue = async (
   rawBody: z.infer<typeof IngestBodyRequest>,
   userId: string,
   activityId?: string,
+  ingestionQueueId?: string,
 ) => {
   const body = { ...rawBody, source: rawBody.source.toLowerCase() };
   const user = await prisma.user.findFirst({

@@ -38,8 +43,18 @@ export const addToQueue = async (
     throw new Error("no credits");
   }

-  const queuePersist = await prisma.ingestionQueue.create({
-    data: {
+  // Upsert: update existing or create new ingestion queue entry
+  const queuePersist = await prisma.ingestionQueue.upsert({
+    where: {
+      id: ingestionQueueId || "non-existent-id", // Use provided ID or dummy ID to force create
+    },
+    update: {
+      data: body,
+      type: body.type,
+      status: IngestionStatus.PENDING,
+      error: null,
+    },
+    create: {
       data: body,
       type: body.type,
       status: IngestionStatus.PENDING,

@@ -51,36 +66,28 @@ export const addToQueue = async (

   let handler;
   if (body.type === EpisodeType.DOCUMENT) {
-    handler = await ingestDocumentTask.trigger(
-      {
-        body,
-        userId,
-        workspaceId: user.Workspace.id,
-        queueId: queuePersist.id,
-      },
-      {
-        queue: "document-ingestion-queue",
-        concurrencyKey: userId,
-        tags: [user.id, queuePersist.id],
-      },
-    );
+    handler = await enqueueIngestDocument({
+      body,
+      userId,
+      workspaceId: user.Workspace.id,
+      queueId: queuePersist.id,
+    });
+
+    // Track document ingestion
+    trackFeatureUsage("document_ingested", userId).catch(console.error);
   } else if (body.type === EpisodeType.CONVERSATION) {
-    handler = await ingestTask.trigger(
-      {
-        body,
-        userId,
-        workspaceId: user.Workspace.id,
-        queueId: queuePersist.id,
-      },
-      {
-        queue: "ingestion-queue",
-        concurrencyKey: userId,
-        tags: [user.id, queuePersist.id],
-      },
-    );
+    handler = await enqueueIngestEpisode({
+      body,
+      userId,
+      workspaceId: user.Workspace.id,
+      queueId: queuePersist.id,
+    });
+
+    // Track episode ingestion
+    trackFeatureUsage("episode_ingested", userId).catch(console.error);
   }

-  return { id: handler?.id, token: handler?.publicAccessToken };
+  return { id: handler?.id, publicAccessToken: handler?.token };
 };

 export { IngestBodyRequest };
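The new optional ingestionQueueId parameter is what makes addToQueue retry-friendly: passing the id of an existing ingestionQueue row makes the upsert reset that row to PENDING instead of creating a new one. A rough usage sketch follows; the failedQueueRow variable is hypothetical and only illustrates where such an id would come from.

// First ingestion: no queue id, so the upsert falls through to `create`.
const first = await addToQueue(body, userId);

// Retrying a failed row: reuse its id so the same row is reset to PENDING.
const retried = await addToQueue(body, userId, undefined, failedQueueRow.id);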
@@ -1,31 +1,23 @@
-import {
-  type CoreMessage,
-  type LanguageModelV1,
-  embed,
-  generateText,
-  streamText,
-} from "ai";
+import { type CoreMessage, embed, generateText, streamText } from "ai";
 import { openai } from "@ai-sdk/openai";
 import { logger } from "~/services/logger.service";

-import { createOllama, type OllamaProvider } from "ollama-ai-provider";
+import { createOllama } from "ollama-ai-provider-v2";
 import { anthropic } from "@ai-sdk/anthropic";
 import { google } from "@ai-sdk/google";
-import { createAmazonBedrock } from "@ai-sdk/amazon-bedrock";
-import { fromNodeProviderChain } from "@aws-sdk/credential-providers";

-export type ModelComplexity = 'high' | 'low';
+export type ModelComplexity = "high" | "low";

 /**
  * Get the appropriate model for a given complexity level.
  * HIGH complexity uses the configured MODEL.
  * LOW complexity automatically downgrades to cheaper variants if possible.
  */
-export function getModelForTask(complexity: ModelComplexity = 'high'): string {
-  const baseModel = process.env.MODEL || 'gpt-4.1-2025-04-14';
+export function getModelForTask(complexity: ModelComplexity = "high"): string {
+  const baseModel = process.env.MODEL || "gpt-4.1-2025-04-14";

   // HIGH complexity - always use the configured model
-  if (complexity === 'high') {
+  if (complexity === "high") {
     return baseModel;
   }

@@ -33,29 +25,73 @@ export function getModelForTask(complexity: ModelComplexity = "high"): string {
   // If already using a cheap model, keep it
   const downgrades: Record<string, string> = {
     // OpenAI downgrades
-    'gpt-5-2025-08-07': 'gpt-5-mini-2025-08-07',
-    'gpt-4.1-2025-04-14': 'gpt-4.1-mini-2025-04-14',
+    "gpt-5-2025-08-07": "gpt-5-mini-2025-08-07",
+    "gpt-4.1-2025-04-14": "gpt-4.1-mini-2025-04-14",

     // Anthropic downgrades
-    'claude-sonnet-4-5': 'claude-3-5-haiku-20241022',
-    'claude-3-7-sonnet-20250219': 'claude-3-5-haiku-20241022',
-    'claude-3-opus-20240229': 'claude-3-5-haiku-20241022',
+    "claude-sonnet-4-5": "claude-3-5-haiku-20241022",
+    "claude-3-7-sonnet-20250219": "claude-3-5-haiku-20241022",
+    "claude-3-opus-20240229": "claude-3-5-haiku-20241022",

     // Google downgrades
-    'gemini-2.5-pro-preview-03-25': 'gemini-2.5-flash-preview-04-17',
-    'gemini-2.0-flash': 'gemini-2.0-flash-lite',
+    "gemini-2.5-pro-preview-03-25": "gemini-2.5-flash-preview-04-17",
+    "gemini-2.0-flash": "gemini-2.0-flash-lite",

     // AWS Bedrock downgrades (keep same model - already cost-optimized)
-    'us.amazon.nova-premier-v1:0': 'us.amazon.nova-premier-v1:0',
+    "us.amazon.nova-premier-v1:0": "us.amazon.nova-premier-v1:0",
   };

   return downgrades[baseModel] || baseModel;
 }

+export const getModel = (takeModel?: string) => {
+  let model = takeModel;
+
+  const anthropicKey = process.env.ANTHROPIC_API_KEY;
+  const googleKey = process.env.GOOGLE_GENERATIVE_AI_API_KEY;
+  const openaiKey = process.env.OPENAI_API_KEY;
+  let ollamaUrl = process.env.OLLAMA_URL;
+  model = model || process.env.MODEL || "gpt-4.1-2025-04-14";
+
+  let modelInstance;
+  let modelTemperature = Number(process.env.MODEL_TEMPERATURE) || 1;
+  ollamaUrl = undefined;
+
+  // First check if Ollama URL exists and use Ollama
+  if (ollamaUrl) {
+    const ollama = createOllama({
+      baseURL: ollamaUrl,
+    });
+    modelInstance = ollama(model || "llama2"); // Default to llama2 if no model specified
+  } else {
+    // If no Ollama, check other models
+
+    if (model.includes("claude")) {
+      if (!anthropicKey) {
+        throw new Error("No Anthropic API key found. Set ANTHROPIC_API_KEY");
+      }
+      modelInstance = anthropic(model);
+      modelTemperature = 0.5;
+    } else if (model.includes("gemini")) {
+      if (!googleKey) {
+        throw new Error("No Google API key found. Set GOOGLE_API_KEY");
+      }
+      modelInstance = google(model);
+    } else {
+      if (!openaiKey) {
+        throw new Error("No OpenAI API key found. Set OPENAI_API_KEY");
+      }
+      modelInstance = openai(model);
+    }
+
+    return modelInstance;
+  }
+};
+
 export interface TokenUsage {
-  promptTokens: number;
-  completionTokens: number;
-  totalTokens: number;
+  promptTokens?: number;
+  completionTokens?: number;
+  totalTokens?: number;
 }

 export async function makeModelCall(

@@ -63,69 +99,13 @@ export async function makeModelCall(
   messages: CoreMessage[],
   onFinish: (text: string, model: string, usage?: TokenUsage) => void,
   options?: any,
-  complexity: ModelComplexity = 'high',
+  complexity: ModelComplexity = "high",
 ) {
-  let modelInstance: LanguageModelV1 | undefined;
   let model = getModelForTask(complexity);
-  const ollamaUrl = process.env.OLLAMA_URL;
-  let ollama: OllamaProvider | undefined;
-
-  if (ollamaUrl) {
-    ollama = createOllama({
-      baseURL: ollamaUrl,
-    });
-  }
-
-  const bedrock = createAmazonBedrock({
-    region: process.env.AWS_REGION || 'us-east-1',
-    credentialProvider: fromNodeProviderChain(),
-  });
-
-  const generateTextOptions: any = {}
-
-  logger.info(
-    `complexity: ${complexity}, model: ${model}`,
-  );
-  switch (model) {
-    case "gpt-4.1-2025-04-14":
-    case "gpt-4.1-mini-2025-04-14":
-    case "gpt-5-mini-2025-08-07":
-    case "gpt-5-2025-08-07":
-    case "gpt-4.1-nano-2025-04-14":
-      modelInstance = openai(model, { ...options });
-      generateTextOptions.temperature = 1
-      break;
-
-    case "claude-3-7-sonnet-20250219":
-    case "claude-3-opus-20240229":
-    case "claude-3-5-haiku-20241022":
-      modelInstance = anthropic(model, { ...options });
-      break;
-
-    case "gemini-2.5-flash-preview-04-17":
-    case "gemini-2.5-pro-preview-03-25":
-    case "gemini-2.0-flash":
-    case "gemini-2.0-flash-lite":
-      modelInstance = google(model, { ...options });
-      break;
-
-    case "us.meta.llama3-3-70b-instruct-v1:0":
-    case "us.deepseek.r1-v1:0":
-    case "qwen.qwen3-32b-v1:0":
-    case "openai.gpt-oss-120b-1:0":
-    case "us.mistral.pixtral-large-2502-v1:0":
-    case "us.amazon.nova-premier-v1:0":
-      modelInstance = bedrock(`${model}`);
-      generateTextOptions.maxTokens = 100000
-      break;
-
-    default:
-      if (ollama) {
-        modelInstance = ollama(model);
-      }
-      logger.warn(`Unsupported model type: ${model}`);
-      break;
-  }
-
+  logger.info(`complexity: ${complexity}, model: ${model}`);
+
+  const modelInstance = getModel(model);
+  const generateTextOptions: any = {};
+
   if (!modelInstance) {
     throw new Error(`Unsupported model type: ${model}`);

@@ -135,16 +115,21 @@ export async function makeModelCall(
   return streamText({
     model: modelInstance,
     messages,
+    ...options,
     ...generateTextOptions,
     onFinish: async ({ text, usage }) => {
-      const tokenUsage = usage ? {
-        promptTokens: usage.promptTokens,
-        completionTokens: usage.completionTokens,
-        totalTokens: usage.totalTokens,
-      } : undefined;
+      const tokenUsage = usage
+        ? {
+            promptTokens: usage.inputTokens,
+            completionTokens: usage.outputTokens,
+            totalTokens: usage.totalTokens,
+          }
+        : undefined;

       if (tokenUsage) {
-        logger.log(`[${complexity.toUpperCase()}] ${model} - Tokens: ${tokenUsage.totalTokens} (prompt: ${tokenUsage.promptTokens}, completion: ${tokenUsage.completionTokens})`);
+        logger.log(
+          `[${complexity.toUpperCase()}] ${model} - Tokens: ${tokenUsage.totalTokens} (prompt: ${tokenUsage.promptTokens}, completion: ${tokenUsage.completionTokens})`,
+        );
       }

       onFinish(text, model, tokenUsage);

@@ -158,14 +143,18 @@ export async function makeModelCall(
     ...generateTextOptions,
   });

-  const tokenUsage = usage ? {
-    promptTokens: usage.promptTokens,
-    completionTokens: usage.completionTokens,
-    totalTokens: usage.totalTokens,
-  } : undefined;
+  const tokenUsage = usage
+    ? {
+        promptTokens: usage.inputTokens,
+        completionTokens: usage.outputTokens,
+        totalTokens: usage.totalTokens,
+      }
+    : undefined;

   if (tokenUsage) {
-    logger.log(`[${complexity.toUpperCase()}] ${model} - Tokens: ${tokenUsage.totalTokens} (prompt: ${tokenUsage.promptTokens}, completion: ${tokenUsage.completionTokens})`);
+    logger.log(
+      `[${complexity.toUpperCase()}] ${model} - Tokens: ${tokenUsage.totalTokens} (prompt: ${tokenUsage.promptTokens}, completion: ${tokenUsage.completionTokens})`,
+    );
   }

   onFinish(text, model, tokenUsage);

@@ -177,19 +166,22 @@ export async function makeModelCall(
  * Determines if a given model is proprietary (OpenAI, Anthropic, Google, Grok)
  * or open source (accessed via Bedrock, Ollama, etc.)
  */
-export function isProprietaryModel(modelName?: string, complexity: ModelComplexity = 'high'): boolean {
+export function isProprietaryModel(
+  modelName?: string,
+  complexity: ModelComplexity = "high",
+): boolean {
   const model = modelName || getModelForTask(complexity);
   if (!model) return false;

   // Proprietary model patterns
   const proprietaryPatterns = [
     /^gpt-/, // OpenAI models
     /^claude-/, // Anthropic models
     /^gemini-/, // Google models
     /^grok-/, // xAI models
   ];

-  return proprietaryPatterns.some(pattern => pattern.test(model));
+  return proprietaryPatterns.some((pattern) => pattern.test(model));
 }

 export async function getEmbedding(text: string) {
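The downgrade table means callers only pick a complexity level; the concrete model id is resolved from the MODEL environment variable and then mapped to a provider SDK instance. A small sketch of the intended behaviour, assuming MODEL=claude-sonnet-4-5 is set (the env value is just an example):

// With MODEL=claude-sonnet-4-5 in the environment:
getModelForTask("high"); // "claude-sonnet-4-5"
getModelForTask("low");  // "claude-3-5-haiku-20241022" via the downgrade table

// getModel() maps the resolved id onto a provider instance,
// throwing if the matching API key (ANTHROPIC_API_KEY here) is missing.
const instance = getModel(getModelForTask("low"));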
@@ -112,43 +112,31 @@ export const getNodeLinks = async (userId: string) => {
 export const getClusteredGraphData = async (userId: string) => {
   const session = driver.session();
   try {
-    // Get the simplified graph structure: Episode, Subject, Object with Predicate as edge
+    // Get Episode -> Entity graph, only showing entities connected to more than 1 episode
     const result = await session.run(
-      `// Get all statements with their episode and entity connections
-      MATCH (e:Episode)-[:HAS_PROVENANCE]->(s:Statement)
-      WHERE s.userId = $userId
-
-      // Get subject and object entities
-      MATCH (s)-[:HAS_SUBJECT]->(subj:Entity)
-      MATCH (s)-[:HAS_PREDICATE]->(pred:Entity)
-      MATCH (s)-[:HAS_OBJECT]->(obj:Entity)
-
-      // Return Episode, Subject, and Object as nodes with Predicate as edge label
-      WITH e, s, subj, pred, obj
-      UNWIND [
-        // Episode -> Subject
-        {source: e, sourceType: 'Episode', target: subj, targetType: 'Entity', predicate: null},
-        // Episode -> Object
-        {source: e, sourceType: 'Episode', target: obj, targetType: 'Entity', predicate: null},
-        // Subject -> Object (with Predicate as edge)
-        {source: subj, sourceType: 'Entity', target: obj, targetType: 'Entity', predicate: pred.name}
-      ] AS rel
-
+      `// Find entities connected to more than 1 episode
+      MATCH (e:Episode{userId: $userId})-[:HAS_PROVENANCE]->(s:Statement {userId: $userId})-[r:HAS_SUBJECT|HAS_OBJECT|HAS_PREDICATE]->(entity:Entity)
+      WITH entity, count(DISTINCT e) as episodeCount
+      WHERE episodeCount > 1
+      WITH collect(entity.uuid) as validEntityUuids
+
+      // Build Episode -> Entity relationships for valid entities
+      MATCH (e:Episode{userId: $userId})-[r:HAS_PROVENANCE]->(s:Statement {userId: $userId})-[r:HAS_SUBJECT|HAS_OBJECT|HAS_PREDICATE]->(entity:Entity)
+      WHERE entity.uuid IN validEntityUuids
+      WITH DISTINCT e, entity, type(r) as relType,
+        CASE WHEN size(e.spaceIds) > 0 THEN e.spaceIds[0] ELSE null END as clusterId,
+        s.createdAt as createdAt
+
       RETURN DISTINCT
-        rel.source.uuid as sourceUuid,
-        rel.source.name as sourceName,
-        rel.source.content as sourceContent,
-        rel.sourceType as sourceNodeType,
-        rel.target.uuid as targetUuid,
-        rel.target.name as targetName,
-        rel.targetType as targetNodeType,
-        rel.predicate as predicateLabel,
-        e.uuid as episodeUuid,
-        e.content as episodeContent,
-        e.spaceIds as spaceIds,
-        s.uuid as statementUuid,
-        s.validAt as validAt,
-        s.createdAt as createdAt`,
+        e.uuid as sourceUuid,
+        e.content as sourceContent,
+        'Episode' as sourceNodeType,
+        entity.uuid as targetUuid,
+        entity.name as targetName,
+        'Entity' as targetNodeType,
+        relType as edgeType,
+        clusterId,
+        createdAt`,
       { userId },
     );

@@ -157,72 +145,29 @@ export const getClusteredGraphData = async (userId: string) => {

     result.records.forEach((record) => {
       const sourceUuid = record.get("sourceUuid");
-      const sourceName = record.get("sourceName");
       const sourceContent = record.get("sourceContent");
-      const sourceNodeType = record.get("sourceNodeType");

       const targetUuid = record.get("targetUuid");
       const targetName = record.get("targetName");
-      const targetNodeType = record.get("targetNodeType");
-      const predicateLabel = record.get("predicateLabel");
-      const episodeUuid = record.get("episodeUuid");
-      const clusterIds = record.get("spaceIds");
-      const clusterId = clusterIds ? clusterIds[0] : undefined;
+      const edgeType = record.get("edgeType");
+      const clusterId = record.get("clusterId");
       const createdAt = record.get("createdAt");

       // Create unique edge identifier to avoid duplicates
-      // For Episode->Subject edges, use generic type; for Subject->Object use predicate
-      const edgeType = predicateLabel || "HAS_SUBJECT";
       const edgeKey = `${sourceUuid}-${targetUuid}-${edgeType}`;
       if (processedEdges.has(edgeKey)) return;
       processedEdges.add(edgeKey);

-      // Build node attributes based on type
-      const sourceAttributes =
-        sourceNodeType === "Episode"
-          ? {
-              nodeType: "Episode",
-              content: sourceContent,
-              episodeUuid: sourceUuid,
-              clusterId,
-            }
-          : {
-              nodeType: "Entity",
-              name: sourceName,
-              clusterId,
-            };
-
-      const targetAttributes =
-        targetNodeType === "Episode"
-          ? {
-              nodeType: "Episode",
-              content: sourceContent,
-              episodeUuid: targetUuid,
-              clusterId,
-            }
-          : {
-              nodeType: "Entity",
-              name: targetName,
-              clusterId,
-            };
-
-      // Build display name
-      const sourceDisplayName =
-        sourceNodeType === "Episode"
-          ? sourceContent || episodeUuid
-          : sourceName || sourceUuid;
-      const targetDisplayName =
-        targetNodeType === "Episode"
-          ? sourceContent || episodeUuid
-          : targetName || targetUuid;
-
       triplets.push({
         sourceNode: {
           uuid: sourceUuid,
-          labels: [sourceNodeType],
-          attributes: sourceAttributes,
-          name: sourceDisplayName,
+          labels: ["Episode"],
+          attributes: {
+            nodeType: "Episode",
+            content: sourceContent,
+            episodeUuid: sourceUuid,
+            clusterId,
+          },
+          name: sourceContent || sourceUuid,
           clusterId,
           createdAt: createdAt || "",
         },

@@ -235,10 +180,14 @@ export const getClusteredGraphData = async (userId: string) => {
         },
         targetNode: {
           uuid: targetUuid,
-          labels: [targetNodeType],
-          attributes: targetAttributes,
+          labels: ["Entity"],
+          attributes: {
+            nodeType: "Entity",
+            name: targetName,
+            clusterId,
+          },
+          name: targetName || targetUuid,
           clusterId,
-          name: targetDisplayName,
           createdAt: createdAt || "",
         },
       });
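Each row returned by the rewritten query is one Episode→Entity edge, so the shape the forEach above consumes looks roughly like this. The values are illustrative placeholders; only the field names come from the RETURN clause.

// Illustrative shape of one record produced by the new Cypher query.
const exampleRecord = {
  sourceUuid: "episode-uuid-123",
  sourceContent: "Discussed the Q2 launch plan with Sarah",
  sourceNodeType: "Episode",
  targetUuid: "entity-uuid-456",
  targetName: "Q2 launch",
  targetNodeType: "Entity",
  edgeType: "HAS_SUBJECT", // one of HAS_SUBJECT | HAS_OBJECT | HAS_PREDICATE
  clusterId: "space-uuid-789", // first spaceId on the episode, or null
  createdAt: "2025-01-01T00:00:00Z",
};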
apps/webapp/app/lib/prompt.server.ts (new file, 324 lines)
@@ -0,0 +1,324 @@
import { type StopCondition } from "ai";

export const hasAnswer: StopCondition<any> = ({ steps }) => {
  return (
    steps.some((step) => step.text?.includes("</final_response>")) ?? false
  );
};

export const hasQuestion: StopCondition<any> = ({ steps }) => {
  return (
    steps.some((step) => step.text?.includes("</question_response>")) ?? false
  );
};

export const REACT_SYSTEM_PROMPT = `
You are a helpful AI assistant with access to user memory. Your primary capabilities are:

1. **Memory-First Approach**: Always check user memory first to understand context and previous interactions
2. **Intelligent Information Gathering**: Analyze queries to determine if current information is needed
3. **Memory Management**: Help users store, retrieve, and organize information in their memory
4. **Contextual Assistance**: Use memory to provide personalized and contextual responses

<information_gathering>
Follow this intelligent approach for information gathering:

1. **MEMORY FIRST** (Always Required)
- Always check memory FIRST using core--search_memory before any other actions
- Consider this your highest priority for EVERY interaction - as essential as breathing
- Memory provides context, personal preferences, and historical information
- Use memory to understand user's background, ongoing projects, and past conversations

2. **INFORMATION SYNTHESIS** (Combine Sources)
- Use memory to personalize current information based on user preferences
- Always store new useful information in memory using core--add_memory

3. **TRAINING KNOWLEDGE** (Foundation)
- Use your training knowledge as the foundation for analysis and explanation
- Apply training knowledge to interpret and contextualize information from memory
- Indicate when you're using training knowledge vs. live information sources

EXECUTION APPROACH:
- Memory search is mandatory for every interaction
- Always indicate your information sources in responses
</information_gathering>

<memory>
QUERY FORMATION:
- Write specific factual statements as queries (e.g., "user email address" not "what is the user's email?")
- Create multiple targeted memory queries for complex requests

KEY QUERY AREAS:
- Personal context: user name, location, identity, work context
- Project context: repositories, codebases, current work, team members
- Task context: recent tasks, ongoing projects, deadlines, priorities
- Integration context: GitHub repos, Slack channels, Linear projects, connected services
- Communication patterns: email preferences, notification settings, workflow automation
- Technical context: coding languages, frameworks, development environment
- Collaboration context: team members, project stakeholders, meeting patterns
- Preferences: likes, dislikes, communication style, tool preferences
- History: previous discussions, past requests, completed work, recurring issues
- Automation rules: user-defined workflows, triggers, automation preferences

MEMORY USAGE:
- Execute multiple memory queries in parallel rather than sequentially
- Batch related memory queries when possible
- Prioritize recent information over older memories
- Create comprehensive context-aware queries based on user message/activity content
- Extract and query SEMANTIC CONTENT, not just structural metadata
- Parse titles, descriptions, and content for actual subject matter keywords
- Search internal SOL tasks/conversations that may relate to the same topics
- Query ALL relatable concepts, not just direct keywords or IDs
- Search for similar past situations, patterns, and related work
- Include synonyms, related terms, and contextual concepts in queries
- Query user's historical approach to similar requests or activities
- Search for connected projects, tasks, conversations, and collaborations
- Retrieve workflow patterns and past decision-making context
- Query broader domain context beyond immediate request scope
- Remember: SOL tracks work that external tools don't - search internal content thoroughly
- Blend memory insights naturally into responses
- Verify you've checked relevant memory before finalizing ANY response

</memory>

<external_services>
- To use: load_mcp with EXACT integration name from the available list
- Can load multiple at once with an array
- Only load when tools are NOT already available in your current toolset
- If a tool is already available, use it directly without load_mcp
- If requested integration unavailable: inform user politely
</external_services>

<tool_calling>
You have tools at your disposal to assist users:

CORE PRINCIPLES:
- Use tools only when necessary for the task at hand
- Always check memory FIRST before making other tool calls
- Execute multiple operations in parallel whenever possible
- Use sequential calls only when output of one is required for input of another

PARAMETER HANDLING:
- Follow tool schemas exactly with all required parameters
- Only use values that are:
  • Explicitly provided by the user (use EXACTLY as given)
  • Reasonably inferred from context
  • Retrieved from memory or prior tool calls
- Never make up values for required parameters
- Omit optional parameters unless clearly needed
- Analyze user's descriptive terms for parameter clues

TOOL SELECTION:
- Never call tools not provided in this conversation
- Skip tool calls for general questions you can answer directly from memory/knowledge
- For identical operations on multiple items, use parallel tool calls
- Default to parallel execution (3-5× faster than sequential calls)
- You can always access external service tools by loading them with load_mcp first

TOOL MENTION HANDLING:
When user message contains <mention data-id="tool_name" data-label="tool"></mention>:
- Extract tool_name from data-id attribute
- First check if it's a built-in tool; if not, check EXTERNAL SERVICES TOOLS
- If available: Load it with load_mcp and focus on addressing the request with this tool
- If unavailable: Inform user and suggest alternatives if possible
- For multiple tool mentions: Load all applicable tools in a single load_mcp call

ERROR HANDLING:
- If a tool returns an error, try fixing parameters before retrying
- If you can't resolve an error, explain the issue to the user
- Consider alternative tools when primary tools are unavailable
</tool_calling>

<communication>
Use EXACTLY ONE of these formats for all user-facing communication:

PROGRESS UPDATES - During processing:
- Use the core--progress_update tool to keep users informed
- Update users about what you're discovering or doing next
- Keep messages clear and user-friendly
- Avoid technical jargon

QUESTIONS - When you need information:
<question_response>
<p>[Your question with HTML formatting]</p>
</question_response>

- Ask questions only when you cannot find information through memory, or tools
- Be specific about what you need to know
- Provide context for why you're asking

FINAL ANSWERS - When completing tasks:
<final_response>
<p>[Your answer with HTML formatting]</p>
</final_response>

CRITICAL:
- Use ONE format per turn
- Apply proper HTML formatting (<h1>, <h2>, <p>, <ul>, <li>, etc.)
- Never mix communication formats
- Keep responses clear and helpful
- Always indicate your information sources (memory, and/or knowledge)
</communication>
`;

export function getReActPrompt(
  metadata?: { source?: string; url?: string; pageTitle?: string },
  intentOverride?: string,
): string {
  const contextHints = [];

  if (
    metadata?.source === "chrome" &&
    metadata?.url?.includes("mail.google.com")
  ) {
    contextHints.push("Content is from email - likely reading intent");
  }
  if (
    metadata?.source === "chrome" &&
    metadata?.url?.includes("calendar.google.com")
  ) {
    contextHints.push("Content is from calendar - likely meeting prep intent");
  }
  if (
    metadata?.source === "chrome" &&
    metadata?.url?.includes("docs.google.com")
  ) {
    contextHints.push(
      "Content is from document editor - likely writing intent",
    );
  }
  if (metadata?.source === "obsidian") {
    contextHints.push(
      "Content is from note editor - likely writing or research intent",
    );
  }

  return `You are a memory research agent analyzing content to find relevant context.

YOUR PROCESS (ReAct Framework):

1. DECOMPOSE: First, break down the content into structured categories

Analyze the content and extract:
a) ENTITIES: Specific people, project names, tools, products mentioned
   Example: "John Smith", "Phoenix API", "Redis", "mobile app"

b) TOPICS & CONCEPTS: Key subjects, themes, domains
   Example: "authentication", "database design", "performance optimization"

c) TEMPORAL MARKERS: Time references, deadlines, events
   Example: "last week's meeting", "Q2 launch", "yesterday's discussion"

d) ACTIONS & TASKS: What's being done, decided, or requested
   Example: "implement feature", "review code", "make decision on"

e) USER INTENT: What is the user trying to accomplish?
   ${intentOverride ? `User specified: "${intentOverride}"` : "Infer from context: reading/writing/meeting prep/research/task tracking/review"}

2. FORM QUERIES: Create targeted search queries from your decomposition

Based on decomposition, form specific queries:
- Search for each entity by name (people, projects, tools)
- Search for topics the user has discussed before
- Search for related work or conversations in this domain
- Use the user's actual terminology, not generic concepts

EXAMPLE - Content: "Email from Sarah about the API redesign we discussed last week"
Decomposition:
- Entities: "Sarah", "API redesign"
- Topics: "API design", "redesign"
- Temporal: "last week"
- Actions: "discussed", "email communication"
- Intent: Reading (email) / meeting prep

Queries to form:
✅ "Sarah" (find past conversations with Sarah)
✅ "API redesign" or "API design" (find project discussions)
✅ "last week" + "Sarah" (find recent context)
✅ "meetings" or "discussions" (find related conversations)

❌ Avoid: "email communication patterns", "API architecture philosophy"
(These are abstract - search what user actually discussed!)

3. SEARCH: Execute your queries using searchMemory tool
- Start with 2-3 core searches based on main entities/topics
- Make each search specific and targeted
- Use actual terms from the content, not rephrased concepts

4. OBSERVE: Evaluate search results
- Did you find relevant episodes? How many unique ones?
- What specific context emerged?
- What new entities/topics appeared in results?
- Are there gaps in understanding?
- Should you search more angles?

Note: Episode counts are automatically deduplicated across searches - overlapping episodes are only counted once.

5. REACT: Decide next action based on observations

STOPPING CRITERIA - Proceed to SYNTHESIZE if ANY of these are true:
- You found 20+ unique episodes across your searches → ENOUGH CONTEXT
- You performed 5+ searches and found relevant episodes → SUFFICIENT
- You performed 7+ searches regardless of results → EXHAUSTED STRATEGIES
- You found strong relevant context from multiple angles → COMPLETE

System nudges will provide awareness of your progress, but you decide when synthesis quality would be optimal.

If you found little/no context AND searched less than 7 times:
- Try different query angles from your decomposition
- Search broader related topics
- Search user's projects or work areas
- Try alternative terminology

⚠️ DO NOT search endlessly - if you found relevant episodes, STOP and synthesize!

6. SYNTHESIZE: After gathering sufficient context, provide final answer
- Wrap your synthesis in <final_response> tags
- Present direct factual context from memory - no meta-commentary
- Write as if providing background context to an AI assistant
- Include: facts, decisions, preferences, patterns, timelines
- Note any gaps, contradictions, or evolution in thinking
- Keep it concise and actionable
- DO NOT use phrases like "Previous discussions on", "From conversations", "Past preferences indicate"
- DO NOT use conversational language like "you said" or "you mentioned"
- Present information as direct factual statements

FINAL RESPONSE FORMAT:
<final_response>
[Direct synthesized context - factual statements only]

Good examples:
- "The API redesign focuses on performance and scalability. Key decisions: moving to GraphQL, caching layer with Redis."
- "Project Phoenix launches Q2 2024. Main features: real-time sync, offline mode, collaborative editing."
- "Sarah leads the backend team. Recent work includes authentication refactor and database migration."

Bad examples:
❌ "Previous discussions on the API revealed..."
❌ "From past conversations, it appears that..."
❌ "Past preferences indicate..."
❌ "The user mentioned that..."

Just state the facts directly.
</final_response>

${contextHints.length > 0 ? `\nCONTEXT HINTS:\n${contextHints.join("\n")}` : ""}

CRITICAL REQUIREMENTS:
- ALWAYS start with DECOMPOSE step - extract entities, topics, temporal markers, actions
- Form specific queries from your decomposition - use user's actual terms
- Minimum 3 searches required
- Maximum 10 searches allowed - must synthesize after that
- STOP and synthesize when you hit stopping criteria (20+ episodes, 5+ searches with results, 7+ searches total)
- Each search should target different aspects from decomposition
- Present synthesis directly without meta-commentary

SEARCH QUALITY CHECKLIST:
✅ Queries use specific terms from content (names, projects, exact phrases)
✅ Searched multiple angles from decomposition (entities, topics, related areas)
✅ Stop when you have enough unique context - don't search endlessly
✅ Tried alternative terminology if initial searches found nothing
❌ Avoid generic/abstract queries that don't match user's vocabulary
❌ Don't stop at 3 searches if you found zero unique episodes
❌ Don't keep searching when you already found 20+ unique episodes
}`;
}
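A quick illustration of how the metadata hints and the intent override feed into the generated prompt; the URL and intent value are placeholders.

// Chrome capture of a Gmail tab: the prompt gains the email/reading hint
// under the CONTEXT HINTS section.
const prompt = getReActPrompt({
  source: "chrome",
  url: "https://mail.google.com/mail/u/0/#inbox",
});

// Obsidian capture with an explicit intent override: the DECOMPOSE step
// renders `User specified: "research"` instead of inferring the intent.
const notePrompt = getReActPrompt({ source: "obsidian" }, "research");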
233
apps/webapp/app/lib/queue-adapter.server.ts
Normal file
233
apps/webapp/app/lib/queue-adapter.server.ts
Normal file
@ -0,0 +1,233 @@
|
|||||||
|
/**
|
||||||
|
* Queue Adapter
|
||||||
|
*
|
||||||
|
* This module provides a unified interface for queueing background jobs,
|
||||||
|
* supporting both Trigger.dev and BullMQ backends based on the QUEUE_PROVIDER
|
||||||
|
* environment variable.
|
||||||
|
*
|
||||||
|
* Usage:
|
||||||
|
* - Set QUEUE_PROVIDER="trigger" for Trigger.dev (default, good for production scaling)
|
||||||
|
* - Set QUEUE_PROVIDER="bullmq" for BullMQ (good for open-source deployments)
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { env } from "~/env.server";
|
||||||
|
import type { z } from "zod";
|
||||||
|
import type { IngestBodyRequest } from "~/jobs/ingest/ingest-episode.logic";
|
||||||
|
import type { CreateConversationTitlePayload } from "~/jobs/conversation/create-title.logic";
|
||||||
|
import type { SessionCompactionPayload } from "~/jobs/session/session-compaction.logic";
|
||||||
|
import type { SpaceAssignmentPayload } from "~/jobs/spaces/space-assignment.logic";
|
||||||
|
import type { SpaceSummaryPayload } from "~/jobs/spaces/space-summary.logic";
|
||||||
|
|
||||||
|
type QueueProvider = "trigger" | "bullmq";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enqueue episode ingestion job
|
||||||
|
*/
|
||||||
|
export async function enqueueIngestEpisode(payload: {
|
||||||
|
body: z.infer<typeof IngestBodyRequest>;
|
||||||
|
userId: string;
|
||||||
|
workspaceId: string;
|
||||||
|
queueId: string;
|
||||||
|
}): Promise<{ id?: string; token?: string }> {
|
||||||
|
const provider = env.QUEUE_PROVIDER as QueueProvider;
|
||||||
|
|
||||||
|
if (provider === "trigger") {
|
||||||
|
const { ingestTask } = await import("~/trigger/ingest/ingest");
|
||||||
|
const handler = await ingestTask.trigger(payload, {
|
||||||
|
queue: "ingestion-queue",
|
||||||
|
concurrencyKey: payload.userId,
|
||||||
|
tags: [payload.userId, payload.queueId],
|
||||||
|
});
|
||||||
|
return { id: handler.id, token: handler.publicAccessToken };
|
||||||
|
} else {
|
||||||
|
// BullMQ
|
||||||
|
const { ingestQueue } = await import("~/bullmq/queues");
|
||||||
|
const job = await ingestQueue.add("ingest-episode", payload, {
|
||||||
|
jobId: payload.queueId,
|
||||||
|
attempts: 3,
|
||||||
|
backoff: { type: "exponential", delay: 2000 },
|
||||||
|
});
|
||||||
|
return { id: job.id };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enqueue document ingestion job
|
||||||
|
*/
|
||||||
|
export async function enqueueIngestDocument(payload: {
|
||||||
|
body: z.infer<typeof IngestBodyRequest>;
|
||||||
|
userId: string;
|
||||||
|
workspaceId: string;
|
||||||
|
queueId: string;
|
||||||
|
}): Promise<{ id?: string; token?: string }> {
|
||||||
|
const provider = env.QUEUE_PROVIDER as QueueProvider;
|
||||||
|
|
||||||
|
if (provider === "trigger") {
|
||||||
|
const { ingestDocumentTask } = await import(
|
      "~/trigger/ingest/ingest-document"
    );
    const handler = await ingestDocumentTask.trigger(payload, {
      queue: "document-ingestion-queue",
      concurrencyKey: payload.userId,
      tags: [payload.userId, payload.queueId],
    });
    return { id: handler.id, token: handler.publicAccessToken };
  } else {
    // BullMQ
    const { documentIngestQueue } = await import("~/bullmq/queues");
    const job = await documentIngestQueue.add("ingest-document", payload, {
      jobId: payload.queueId,
      attempts: 3,
      backoff: { type: "exponential", delay: 2000 },
    });

    return { id: job.id };
  }
}

/**
 * Enqueue conversation title creation job
 */
export async function enqueueCreateConversationTitle(
  payload: CreateConversationTitlePayload,
): Promise<{ id?: string }> {
  const provider = env.QUEUE_PROVIDER as QueueProvider;

  if (provider === "trigger") {
    const { createConversationTitle } = await import(
      "~/trigger/conversation/create-conversation-title"
    );
    const handler = await createConversationTitle.trigger(payload);
    return { id: handler.id };
  } else {
    // BullMQ
    const { conversationTitleQueue } = await import("~/bullmq/queues");
    const job = await conversationTitleQueue.add(
      "create-conversation-title",
      payload,
      {
        attempts: 3,
        backoff: { type: "exponential", delay: 2000 },
      },
    );
    return { id: job.id };
  }
}

/**
 * Enqueue session compaction job
 */
export async function enqueueSessionCompaction(
  payload: SessionCompactionPayload,
): Promise<{ id?: string }> {
  const provider = env.QUEUE_PROVIDER as QueueProvider;

  if (provider === "trigger") {
    const { sessionCompactionTask } = await import(
      "~/trigger/session/session-compaction"
    );
    const handler = await sessionCompactionTask.trigger(payload);
    return { id: handler.id };
  } else {
    // BullMQ
    const { sessionCompactionQueue } = await import("~/bullmq/queues");
    const job = await sessionCompactionQueue.add(
      "session-compaction",
      payload,
      {
        attempts: 3,
        backoff: { type: "exponential", delay: 2000 },
      },
    );
    return { id: job.id };
  }
}

/**
 * Enqueue space assignment job
 */
export async function enqueueSpaceAssignment(
  payload: SpaceAssignmentPayload,
): Promise<{ id?: string }> {
  const provider = env.QUEUE_PROVIDER as QueueProvider;

  if (provider === "trigger") {
    const { triggerSpaceAssignment } = await import(
      "~/trigger/spaces/space-assignment"
    );
    const handler = await triggerSpaceAssignment(payload);
    return { id: handler.id };
  } else {
    // BullMQ
    const { spaceAssignmentQueue } = await import("~/bullmq/queues");
    const job = await spaceAssignmentQueue.add("space-assignment", payload, {
      jobId: `space-assignment-${payload.userId}-${payload.mode}-${Date.now()}`,
      attempts: 3,
      backoff: { type: "exponential", delay: 2000 },
    });
    return { id: job.id };
  }
}

/**
 * Enqueue space summary job
 */
export async function enqueueSpaceSummary(
  payload: SpaceSummaryPayload,
): Promise<{ id?: string }> {
  const provider = env.QUEUE_PROVIDER as QueueProvider;

  if (provider === "trigger") {
    const { triggerSpaceSummary } = await import(
      "~/trigger/spaces/space-summary"
    );
    const handler = await triggerSpaceSummary(payload);
    return { id: handler.id };
  } else {
    // BullMQ
    const { spaceSummaryQueue } = await import("~/bullmq/queues");
    const job = await spaceSummaryQueue.add("space-summary", payload, {
      jobId: `space-summary-${payload.spaceId}-${Date.now()}`,
      attempts: 3,
      backoff: { type: "exponential", delay: 2000 },
    });
    return { id: job.id };
  }
}

/**
 * Enqueue BERT topic analysis job
 */
export async function enqueueBertTopicAnalysis(payload: {
  userId: string;
  workspaceId: string;
  minTopicSize?: number;
  nrTopics?: number;
}): Promise<{ id?: string }> {
  const provider = env.QUEUE_PROVIDER as QueueProvider;

  if (provider === "trigger") {
    const { bertTopicAnalysisTask } = await import(
      "~/trigger/bert/topic-analysis"
    );
    const handler = await bertTopicAnalysisTask.trigger(payload, {
      queue: "bert-topic-analysis",
      concurrencyKey: payload.userId,
      tags: [payload.userId, "bert-analysis"],
    });
    return { id: handler.id };
  } else {
    // BullMQ
    const { bertTopicQueue } = await import("~/bullmq/queues");
    const job = await bertTopicQueue.add("topic-analysis", payload, {
      jobId: `bert-${payload.userId}-${Date.now()}`,
      attempts: 2, // Only 2 attempts for expensive operations
      backoff: { type: "exponential", delay: 5000 },
    });
    return { id: job.id };
  }
}

export const isTriggerDeployment = () => {
  return env.QUEUE_PROVIDER === "trigger";
};
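Note: the BullMQ branches above only enqueue jobs; a separate worker process has to pick them up. A minimal consumer sketch in TypeScript follows; the queue name, payload fields, and Redis connection details are assumptions for illustration, not taken from this diff.

// Hypothetical BullMQ worker; queue name, payload shape, and Redis URL are assumptions.
import { Worker } from "bullmq";
import IORedis from "ioredis";

const connection = new IORedis(process.env.REDIS_URL ?? "redis://localhost:6379", {
  maxRetriesPerRequest: null, // required by BullMQ for blocking worker connections
});

const worker = new Worker(
  "document-ingest", // must match the name documentIngestQueue was created with
  async (job) => {
    const { userId, queueId } = job.data;
    // ...run the actual document ingestion here...
    return { userId, queueId, status: "done" };
  },
  { connection, concurrency: 5 },
);

worker.on("failed", (job, err) => {
  console.error(`ingest job ${job?.id} failed:`, err);
});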
@@ -2,6 +2,8 @@ import type { Prisma, User } from "@core/database";
 import type { GoogleProfile } from "@coji/remix-auth-google";
 import { prisma } from "~/db.server";
 import { env } from "~/env.server";
+import { runQuery } from "~/lib/neo4j.server";
+import { trackFeatureUsage } from "~/services/telemetry.server";
 export type { User } from "@core/database";

 type FindOrCreateMagicLink = {
@@ -71,9 +73,16 @@ export async function findOrCreateMagicLinkUser(
     },
   });
+
+  const isNewUser = !existingUser;
+
+  // Track new user registration
+  if (isNewUser) {
+    trackFeatureUsage("user_registered", user.id).catch(console.error);
+  }

   return {
     user,
-    isNewUser: !existingUser,
+    isNewUser,
   };
 }

@@ -159,9 +168,16 @@ export async function findOrCreateGoogleUser({
     },
   });
+
+  const isNewUser = !existingUser;
+
+  // Track new user registration
+  if (isNewUser) {
+    trackFeatureUsage("user_registered", user.id).catch(console.error);
+  }

   return {
     user,
-    isNewUser: !existingUser,
+    isNewUser,
   };
 }

@@ -238,3 +254,45 @@ export async function grantUserCloudAccess({
     },
   });
 }
+
+export async function deleteUser(id: User["id"]) {
+  // Get user to verify they exist
+  const user = await prisma.user.findUnique({
+    where: { id },
+  });
+
+  if (!user) {
+    throw new Error("User not found");
+  }
+
+  // Delete all user-related nodes from the Neo4j knowledge graph
+  try {
+    // Delete all nodes (Episodes, Entities, Statements, Spaces, Documents, Clusters)
+    // and their relationships where userId matches
+    await runQuery(
+      `
+      MATCH (n {userId: $userId})
+      DETACH DELETE n
+      `,
+      { userId: id }
+    );
+    console.log(`Deleted all graph nodes for user ${id}`);
+  } catch (error) {
+    console.error("Failed to delete graph nodes:", error);
+    // Continue with deletion even if graph cleanup fails
+  }
+
+  // Delete the user - cascade deletes will handle all related data:
+  // - Workspace (and all workspace-related data via cascade)
+  // - PersonalAccessToken
+  // - UserUsage
+  // - Conversations, ConversationHistory
+  // - IngestionRules
+  // - IntegrationAccounts
+  // - RecallLogs
+  // - WebhookConfigurations
+  // - All OAuth models
+  return prisma.user.delete({
+    where: { id },
+  });
+}
@@ -29,12 +29,6 @@ Exclude:
 • Anything not explicitly consented to share
 don't store anything the user did not explicitly consent to share.`;

-const githubDescription = `Everything related to my GitHub work - repos I'm working on, projects I contribute to, code I'm writing, PRs I'm reviewing. Basically my coding life on GitHub.`;
-
-const healthDescription = `My health and wellness stuff - how I'm feeling, what I'm learning about my body, experiments I'm trying, patterns I notice. Whatever matters to me about staying healthy.`;
-
-const fitnessDescription = `My workouts and training - what I'm doing at the gym, runs I'm going on, progress I'm making, goals I'm chasing. Anything related to physical exercise and getting stronger.`;
-
 export async function createWorkspace(
   input: CreateWorkspaceDto,
 ): Promise<Workspace> {
@@ -56,32 +50,7 @@ export async function createWorkspace(
   await ensureBillingInitialized(workspace.id);

   // Create default spaces
-  await Promise.all([
-    spaceService.createSpace({
-      name: "Profile",
-      description: profileRule,
-      userId: input.userId,
-      workspaceId: workspace.id,
-    }),
-    spaceService.createSpace({
-      name: "GitHub",
-      description: githubDescription,
-      userId: input.userId,
-      workspaceId: workspace.id,
-    }),
-    spaceService.createSpace({
-      name: "Health",
-      description: healthDescription,
-      userId: input.userId,
-      workspaceId: workspace.id,
-    }),
-    spaceService.createSpace({
-      name: "Fitness",
-      description: fitnessDescription,
-      userId: input.userId,
-      workspaceId: workspace.id,
-    }),
-  ]);
+  await Promise.all([]);

   try {
     const response = await sendEmail({ email: "welcome", to: user.email });
@@ -51,6 +51,7 @@ export const loader = async ({ request }: LoaderFunctionArgs) => {
   const { getTheme } = await themeSessionResolver(request);

   const posthogProjectKey = env.POSTHOG_PROJECT_KEY;
+  const telemetryEnabled = env.TELEMETRY_ENABLED;
   const user = await getUser(request);
   const usageSummary = await getUsageSummary(user?.Workspace?.id as string);

@@ -62,6 +63,7 @@ export const loader = async ({ request }: LoaderFunctionArgs) => {
     toastMessage,
     theme: getTheme(),
     posthogProjectKey,
+    telemetryEnabled,
     appEnv: env.APP_ENV,
     appOrigin: env.APP_ORIGIN,
   },
@@ -113,8 +115,10 @@ export function ErrorBoundary() {
 }

 function App() {
-  const { posthogProjectKey } = useTypedLoaderData<typeof loader>();
-  usePostHog(posthogProjectKey);
+  const { posthogProjectKey, telemetryEnabled } =
+    useTypedLoaderData<typeof loader>();
+
+  usePostHog(posthogProjectKey, telemetryEnabled);
   const [theme] = useTheme();

   return (
@@ -1,44 +0,0 @@
import { json } from "@remix-run/node";
import { createActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";

import { getWorkspaceByUser } from "~/models/workspace.server";
import {
  createConversation,
  CreateConversationSchema,
  getCurrentConversationRun,
  readConversation,
  stopConversation,
} from "~/services/conversation.server";
import { z } from "zod";

export const ConversationIdSchema = z.object({
  conversationId: z.string(),
});

const { action, loader } = createActionApiRoute(
  {
    params: ConversationIdSchema,
    allowJWT: true,
    authorization: {
      action: "oauth",
    },
    corsStrategy: "all",
  },
  async ({ authentication, params }) => {
    const workspace = await getWorkspaceByUser(authentication.userId);

    if (!workspace) {
      throw new Error("No workspace found");
    }

    // Call the service to get the redirect URL
    const run = await getCurrentConversationRun(
      params.conversationId,
      workspace?.id,
    );

    return json(run);
  },
);

export { action, loader };
@@ -1,41 +0,0 @@
import { json } from "@remix-run/node";
import { createActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";

import { getWorkspaceByUser } from "~/models/workspace.server";
import {
  createConversation,
  CreateConversationSchema,
  readConversation,
  stopConversation,
} from "~/services/conversation.server";
import { z } from "zod";

export const ConversationIdSchema = z.object({
  conversationId: z.string(),
});

const { action, loader } = createActionApiRoute(
  {
    params: ConversationIdSchema,
    allowJWT: true,
    authorization: {
      action: "oauth",
    },
    corsStrategy: "all",
    method: "POST",
  },
  async ({ authentication, params }) => {
    const workspace = await getWorkspaceByUser(authentication.userId);

    if (!workspace) {
      throw new Error("No workspace found");
    }

    // Call the service to get the redirect URL
    const stop = await stopConversation(params.conversationId, workspace?.id);

    return json(stop);
  },
);

export { action, loader };
@@ -0,0 +1,45 @@
// import { json } from "@remix-run/node";
// import { createActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";
// import { UI_MESSAGE_STREAM_HEADERS } from "ai";

// import { getConversationAndHistory } from "~/services/conversation.server";
// import { z } from "zod";
// import { createResumableStreamContext } from "resumable-stream";

// export const ConversationIdSchema = z.object({
//   conversationId: z.string(),
// });

// const { action, loader } = createActionApiRoute(
//   {
//     params: ConversationIdSchema,
//     allowJWT: true,
//     authorization: {
//       action: "oauth",
//     },
//     corsStrategy: "all",
//   },
//   async ({ authentication, params }) => {
//     const conversation = await getConversationAndHistory(
//       params.conversationId,
//       authentication.userId,
//     );

//     const lastConversation = conversation?.ConversationHistory.pop();

//     if (!lastConversation) {
//       return json({}, { status: 204 });
//     }

//     const streamContext = createResumableStreamContext({
//       waitUntil: null,
//     });

//     return new Response(
//       await streamContext.resumeExistingStream(lastConversation.id),
//       { headers: UI_MESSAGE_STREAM_HEADERS },
//     );
//   },
// );

// export { action, loader };
@@ -1,50 +0,0 @@
import { json } from "@remix-run/node";
import { createActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";

import { getWorkspaceByUser } from "~/models/workspace.server";
import {
  getConversation,
  deleteConversation,
} from "~/services/conversation.server";
import { z } from "zod";

export const ConversationIdSchema = z.object({
  conversationId: z.string(),
});

const { action, loader } = createActionApiRoute(
  {
    params: ConversationIdSchema,
    allowJWT: true,
    authorization: {
      action: "oauth",
    },
    corsStrategy: "all",
  },
  async ({ params, authentication, request }) => {
    const workspace = await getWorkspaceByUser(authentication.userId);

    if (!workspace) {
      throw new Error("No workspace found");
    }

    const method = request.method;

    if (method === "GET") {
      // Get a conversation by ID
      const conversation = await getConversation(params.conversationId);
      return json(conversation);
    }

    if (method === "DELETE") {
      // Soft delete a conversation
      const deleted = await deleteConversation(params.conversationId);
      return json(deleted);
    }

    // Method not allowed
    return new Response("Method Not Allowed", { status: 405 });
  },
);

export { action, loader };
@@ -1,37 +1,159 @@
-import { json } from "@remix-run/node";
-import { createActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";
-
-import { getWorkspaceByUser } from "~/models/workspace.server";
 import {
-  createConversation,
-  CreateConversationSchema,
+  convertToModelMessages,
+  streamText,
+  validateUIMessages,
+  type LanguageModel,
+  experimental_createMCPClient as createMCPClient,
+  generateId,
+  stepCountIs,
+} from "ai";
+import { z } from "zod";
+import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";
+
+import { createHybridActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";
+import {
+  createConversationHistory,
+  getConversationAndHistory,
 } from "~/services/conversation.server";
+import { getModel } from "~/lib/model.server";
+import { UserTypeEnum } from "@core/types";
+import { nanoid } from "nanoid";
+import {
+  deletePersonalAccessToken,
+  getOrCreatePersonalAccessToken,
+} from "~/services/personalAccessToken.server";
+import {
+  hasAnswer,
+  hasQuestion,
+  REACT_SYSTEM_PROMPT,
+} from "~/lib/prompt.server";
+import { enqueueCreateConversationTitle } from "~/lib/queue-adapter.server";
+import { env } from "~/env.server";

-const { action, loader } = createActionApiRoute(
+const ChatRequestSchema = z.object({
+  message: z.object({
+    id: z.string().optional(),
+    parts: z.array(z.any()),
+    role: z.string(),
+  }),
+  id: z.string(),
+});
+
+const { loader, action } = createHybridActionApiRoute(
   {
-    body: CreateConversationSchema,
+    body: ChatRequestSchema,
     allowJWT: true,
     authorization: {
-      action: "oauth",
+      action: "conversation",
     },
     corsStrategy: "all",
   },
   async ({ body, authentication }) => {
-    const workspace = await getWorkspaceByUser(authentication.userId);
+    const randomKeyName = `chat_${nanoid(10)}`;
+    const pat = await getOrCreatePersonalAccessToken({
+      name: randomKeyName,
+      userId: authentication.userId,
+    });

-    if (!workspace) {
-      throw new Error("No workspace found");
-    }
+    const message = body.message.parts[0].text;
+    const id = body.message.id;
+    const apiEndpoint = `${env.APP_ORIGIN}/api/v1/mcp?source=core`;
+    const url = new URL(apiEndpoint);

-    // Call the service to get the redirect URL
-    const conversation = await createConversation(
-      workspace?.id,
+    const mcpClient = await createMCPClient({
+      transport: new StreamableHTTPClientTransport(url, {
+        requestInit: {
+          headers: pat.token
+            ? {
+                Authorization: `Bearer ${pat.token}`,
+              }
+            : {},
+        },
+      }),
+    });
+
+    const conversation = await getConversationAndHistory(
+      body.id,
       authentication.userId,
-      body,
     );

-    return json(conversation);
+    const conversationHistory = conversation?.ConversationHistory ?? [];
+
+    if (conversationHistory.length === 0) {
+      // Trigger conversation title task
+      await enqueueCreateConversationTitle({
+        conversationId: body.id,
+        message,
+      });
+    }
+
+    if (conversationHistory.length > 1) {
+      await createConversationHistory(message, body.id, UserTypeEnum.User);
+    }
+
+    const messages = conversationHistory.map((history: any) => {
+      return {
+        parts: [{ text: history.message, type: "text" }],
+        role: "user",
+        id: history.id,
+      };
+    });
+
+    const tools = { ...(await mcpClient.tools()) };
+
+    const finalMessages = [
+      ...messages,
+      {
+        parts: [{ text: message, type: "text" }],
+        role: "user",
+        id: id ?? generateId(),
+      },
+    ];
+
+    const validatedMessages = await validateUIMessages({
+      messages: finalMessages,
+    });
+
+    const result = streamText({
+      model: getModel() as LanguageModel,
+      messages: [
+        {
+          role: "system",
+          content: REACT_SYSTEM_PROMPT,
+        },
+        ...convertToModelMessages(validatedMessages),
+      ],
+      tools,
+      stopWhen: [stepCountIs(10), hasAnswer, hasQuestion],
+    });
+
+    result.consumeStream(); // no await
+    await deletePersonalAccessToken(pat?.id);
+
+    return result.toUIMessageStreamResponse({
+      originalMessages: validatedMessages,
+      onFinish: async ({ messages }) => {
+        const lastMessage = messages.pop();
+        let message = "";
+        lastMessage?.parts.forEach((part) => {
+          if (part.type === "text") {
+            message += part.text;
+          }
+        });
+
+        await createConversationHistory(message, body.id, UserTypeEnum.Agent);
+      },
+      // async consumeSseStream({ stream }) {
+      //   // Create a resumable stream from the SSE stream
+      //   const streamContext = createResumableStreamContext({ waitUntil: null });
+      //   await streamContext.createNewResumableStream(
+      //     conversation.conversationHistoryId,
+      //     () => stream,
+      //   );
+      // },
+    });
   },
 );

-export { action, loader };
+export { loader, action };
@@ -1,8 +1,27 @@
 import { z } from "zod";
 import { json } from "@remix-run/node";
 import { createActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";
-import { deepSearch } from "~/trigger/deep-search";
-import { runs } from "@trigger.dev/sdk";
+import { trackFeatureUsage } from "~/services/telemetry.server";
+import { nanoid } from "nanoid";
+import {
+  deletePersonalAccessToken,
+  getOrCreatePersonalAccessToken,
+} from "~/services/personalAccessToken.server";
+
+import {
+  convertToModelMessages,
+  generateId,
+  generateText,
+  type LanguageModel,
+  stepCountIs,
+  streamText,
+  tool,
+  validateUIMessages,
+} from "ai";
+import axios from "axios";
+import { logger } from "~/services/logger.service";
+import { getReActPrompt, hasAnswer } from "~/lib/prompt.server";
+import { getModel } from "~/lib/model.server";

 const DeepSearchBodySchema = z.object({
   content: z.string().min(1, "Content is required"),
@@ -17,6 +36,41 @@ const DeepSearchBodySchema = z.object({
     .optional(),
 });

+function createSearchMemoryTool(token: string) {
+  return tool({
+    description:
+      "Search the user's memory for relevant facts and episodes. Use this tool multiple times with different queries to gather comprehensive context.",
+    inputSchema: z.object({
+      query: z
+        .string()
+        .describe(
+          "Search query to find relevant information. Be specific: entity names, topics, concepts.",
+        ),
+    }),
+    execute: async ({ query }: { query: string }) => {
+      try {
+        const response = await axios.post(
+          `${process.env.API_BASE_URL || "https://core.heysol.ai"}/api/v1/search`,
+          { query, structured: false },
+          {
+            headers: {
+              Authorization: `Bearer ${token}`,
+            },
+          },
+        );
+        return response.data;
+      } catch (error) {
+        logger.error(`SearchMemory tool error: ${error}`);
+        return {
+          facts: [],
+          episodes: [],
+          summary: "No results found",
+        };
+      }
+    },
+  } as any);
+}
+
 const { action, loader } = createActionApiRoute(
   {
     body: DeepSearchBodySchema,
@@ -28,35 +82,94 @@ const { action, loader } = createActionApiRoute(
     corsStrategy: "all",
   },
   async ({ body, authentication }) => {
-    let trigger;
-    if (!body.stream) {
-      trigger = await deepSearch.trigger({
-        content: body.content,
-        userId: authentication.userId,
-        stream: body.stream,
-        intentOverride: body.intentOverride,
-        metadata: body.metadata,
-      });
+    // Track deep search
+    trackFeatureUsage("deep_search_performed", authentication.userId).catch(
+      console.error,
+    );
+
+    const randomKeyName = `deepSearch_${nanoid(10)}`;
+
+    const pat = await getOrCreatePersonalAccessToken({
+      name: randomKeyName,
+      userId: authentication.userId as string,
+    });
+
+    if (!pat?.token) {
+      return json({
+        success: false,
+        error: "Failed to create personal access token",
+      });
+    }
+
+    try {
+      // Create search tool that agent will use
+      const searchTool = createSearchMemoryTool(pat.token);
+
+      const tools = {
+        searchMemory: searchTool,
+      };
+
+      // Build initial messages with ReAct prompt
+      const initialMessages = [
+        {
+          role: "user",
+          parts: [
+            {
+              type: "text",
+              text: `CONTENT TO ANALYZE:\n${body.content}\n\nPlease search my memory for relevant context and synthesize what you find.`,
+            },
+          ],
+          id: generateId(),
+        },
+      ];
+
+      const validatedMessages = await validateUIMessages({
+        messages: initialMessages,
+        tools,
+      });

-      return json(trigger);
-    } else {
-      const runHandler = await deepSearch.trigger({
-        content: body.content,
-        userId: authentication.userId,
-        stream: body.stream,
-        intentOverride: body.intentOverride,
-        metadata: body.metadata,
-      });
+      if (body.stream) {
+        const result = streamText({
+          model: getModel() as LanguageModel,
+          messages: [
+            {
+              role: "system",
+              content: getReActPrompt(body.metadata, body.intentOverride),
+            },
+            ...convertToModelMessages(validatedMessages),
+          ],
+          tools,
+          stopWhen: [hasAnswer, stepCountIs(10)],
+        });

-      for await (const run of runs.subscribeToRun(runHandler.id)) {
-        if (run.status === "COMPLETED") {
-          return json(run.output);
-        } else if (run.status === "FAILED") {
-          return json(run.error);
-        }
-      }
+        return result.toUIMessageStreamResponse({
+          originalMessages: validatedMessages,
+        });
+      } else {
+        const { text } = await generateText({
+          model: getModel() as LanguageModel,
+          messages: [
+            {
+              role: "system",
+              content: getReActPrompt(body.metadata, body.intentOverride),
+            },
+            ...convertToModelMessages(validatedMessages),
+          ],
+          tools,
+          stopWhen: [hasAnswer, stepCountIs(10)],
+        });

-      return json({ error: "Run failed" });
+        await deletePersonalAccessToken(pat?.id);
+        return json({ text });
+      }
+    } catch (error: any) {
+      await deletePersonalAccessToken(pat?.id);
+      logger.error(`Deep search error: ${error}`);
+
+      return json({
+        success: false,
+        error: error.message,
+      });
     }
   },
 );
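A rough client-side sketch of calling this deep-search route in non-streaming mode; the mounted path and bearer-token auth are assumptions inferred from the route rewrite above, not confirmed by this diff, and the host, token, and content are placeholders.

// Hypothetical client call; path, auth header, and example content are placeholders.
const host = "https://core.heysol.ai";
const token = "<api-token>";

const res = await fetch(`${host}/api/v1/deep-search`, {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Bearer ${token}`,
  },
  body: JSON.stringify({
    content: "Summarize what I know about the Atlas project",
    stream: false,
  }),
});

// Non-streaming responses return { text } on success or { success: false, error } on failure.
const result = await res.json();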
@@ -1,32 +0,0 @@
import { z } from "zod";
import { createActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";
import { json } from "@remix-run/node";
import { extensionSearch } from "~/trigger/extension/search";

export const ExtensionSearchBodyRequest = z.object({
  input: z.string().min(1, "Input text is required"),
  outputType: z.string().default("markdown"),
});

const { action, loader } = createActionApiRoute(
  {
    body: ExtensionSearchBodyRequest,
    method: "POST",
    allowJWT: true,
    authorization: {
      action: "search",
    },
    corsStrategy: "all",
  },
  async ({ body, authentication }) => {
    const trigger = await extensionSearch.trigger({
      userInput: body.input,
      userId: authentication.userId,
      outputType: body.outputType,
    });

    return json(trigger);
  },
);

export { action, loader };
@@ -1,32 +0,0 @@
import { z } from "zod";
import { createActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";
import { json } from "@remix-run/node";
import { extensionSummary } from "~/trigger/extension/summary";

export const ExtensionSummaryBodyRequest = z.object({
  html: z.string().min(1, "HTML content is required"),
  url: z.string().url("Valid URL is required"),
  title: z.string().optional(),
  parseImages: z.boolean().default(false),
});

const { action, loader } = createActionApiRoute(
  {
    body: ExtensionSummaryBodyRequest,
    allowJWT: true,
    authorization: {
      action: "search",
    },
    corsStrategy: "all",
  },
  async ({ body, authentication }) => {
    const response = await extensionSummary.trigger({
      ...body,
      apiKey: authentication.apiKey,
    });

    return json(response);
  },
);

export { action, loader };
@@ -6,7 +6,7 @@ import {
   deleteIngestionQueue,
   getIngestionQueue,
 } from "~/services/ingestionLogs.server";
-import { runs, tasks } from "@trigger.dev/sdk";
+import { findRunningJobs, cancelJob } from "~/services/jobManager.server";

 export const DeleteEpisodeBodyRequest = z.object({
   id: z.string(),
@@ -37,19 +37,15 @@ const { action, loader } = createHybridActionApiRoute(
     }

     const output = ingestionQueue.output as any;
-    const runningTasks = await runs.list({
-      tag: [authentication.userId, ingestionQueue.id],
+    const runningTasks = await findRunningJobs({
+      tags: [authentication.userId, ingestionQueue.id],
       taskIdentifier: "ingest-episode",
     });

-    const latestTask = runningTasks.data.find(
-      (task) =>
-        task.tags.includes(authentication.userId) &&
-        task.tags.includes(ingestionQueue.id),
-    );
+    const latestTask = runningTasks[0];

-    if (latestTask && !latestTask?.isCompleted) {
-      runs.cancel(latestTask?.id as string);
+    if (latestTask && !latestTask.isCompleted) {
+      await cancelJob(latestTask.id);
     }

     let result;
@@ -8,6 +8,7 @@ import { logger } from "~/services/logger.service";
 import { getWorkspaceByUser } from "~/models/workspace.server";
 import { tasks } from "@trigger.dev/sdk";
 import { type scheduler } from "~/trigger/integrations/scheduler";
+import { isTriggerDeployment } from "~/lib/queue-adapter.server";

 // Schema for creating an integration account with API key
 const IntegrationAccountBodySchema = z.object({
@@ -63,6 +64,13 @@ const { action, loader } = createHybridActionApiRoute(
       );
     }

+    if (!isTriggerDeployment()) {
+      return json(
+        { error: "Integrations don't work in non trigger deployment" },
+        { status: 400 },
+      );
+    }
+
     await tasks.trigger<typeof scheduler>("scheduler", {
       integrationAccountId: setupResult?.account?.id,
     });
88  apps/webapp/app/routes/api.v1.logs.$logId.retry.tsx  Normal file
@@ -0,0 +1,88 @@
import { json } from "@remix-run/node";
import { z } from "zod";
import { IngestionStatus } from "@core/database";
import { getIngestionQueue } from "~/services/ingestionLogs.server";
import { createHybridActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";
import { addToQueue } from "~/lib/ingest.server";

// Schema for log ID parameter
const LogParamsSchema = z.object({
  logId: z.string(),
});

const { action } = createHybridActionApiRoute(
  {
    params: LogParamsSchema,
    allowJWT: true,
    method: "POST",
    authorization: {
      action: "update",
    },
    corsStrategy: "all",
  },
  async ({ params, authentication }) => {
    try {
      const ingestionQueue = await getIngestionQueue(params.logId);

      if (!ingestionQueue) {
        return json(
          {
            error: "Ingestion log not found",
            code: "not_found",
          },
          { status: 404 },
        );
      }

      // Only allow retry for FAILED status
      if (ingestionQueue.status !== IngestionStatus.FAILED) {
        return json(
          {
            error: "Only failed ingestion logs can be retried",
            code: "invalid_status",
          },
          { status: 400 },
        );
      }

      // Get the original ingestion data
      const originalData = ingestionQueue.data as any;

      // Re-enqueue the job with the existing queue ID (will upsert)
      await addToQueue(
        originalData,
        authentication.userId,
        ingestionQueue.activityId || undefined,
        ingestionQueue.id, // Pass the existing queue ID for upsert
      );

      return json({
        success: true,
        message: "Ingestion retry initiated successfully",
      });
    } catch (error) {
      console.error("Error retrying ingestion:", error);

      // Handle specific error cases
      if (error instanceof Error && error.message === "no credits") {
        return json(
          {
            error: "Insufficient credits to retry ingestion",
            code: "no_credits",
          },
          { status: 402 },
        );
      }

      return json(
        {
          error: "Failed to retry ingestion",
          code: "internal_error",
        },
        { status: 500 },
      );
    }
  },
);

export { action };
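A hedged usage sketch for the new retry endpoint; the URL shape is inferred from the Remix route file name (api.v1.logs.$logId.retry.tsx), and the host, token, and log id are placeholders.

// Hypothetical call to the retry endpoint added above.
const host = "https://core.heysol.ai";
const token = "<api-token>";
const logId = "<failed-ingestion-log-id>";

const res = await fetch(`${host}/api/v1/logs/${logId}/retry`, {
  method: "POST",
  headers: { Authorization: `Bearer ${token}` },
});

// 200 -> { success: true, message }, 400 -> log is not FAILED,
// 402 -> insufficient credits, 404 -> unknown log id.
const body = await res.json();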
@@ -1,5 +1,4 @@
 import { json } from "@remix-run/node";
-import { runs } from "@trigger.dev/sdk";
 import { z } from "zod";
 import { deleteEpisodeWithRelatedNodes } from "~/services/graphModels/episode";
 import {
@@ -11,6 +10,7 @@ import {
   createHybridActionApiRoute,
   createHybridLoaderApiRoute,
 } from "~/services/routeBuilders/apiBuilder.server";
+import { findRunningJobs, cancelJob } from "~/services/jobManager.server";

 // Schema for space ID parameter
 const LogParamsSchema = z.object({
@@ -59,19 +59,15 @@ const { action } = createHybridActionApiRoute(
     }

     const output = ingestionQueue.output as any;
-    const runningTasks = await runs.list({
-      tag: [authentication.userId, ingestionQueue.id],
+    const runningTasks = await findRunningJobs({
+      tags: [authentication.userId, ingestionQueue.id],
       taskIdentifier: "ingest-episode",
     });

-    const latestTask = runningTasks.data.find(
-      (task) =>
-        task.tags.includes(authentication.userId) &&
-        task.tags.includes(ingestionQueue.id),
-    );
+    const latestTask = runningTasks[0];

-    if (latestTask && !latestTask?.isCompleted) {
-      runs.cancel(latestTask?.id);
+    if (latestTask && !latestTask.isCompleted) {
+      await cancelJob(latestTask.id);
     }

     let result;
@@ -1,6 +1,7 @@
-import { type LoaderFunctionArgs, json } from "@remix-run/node";
+import { json } from "@remix-run/node";
 import { z } from "zod";
 import { prisma } from "~/db.server";
+
 import { createHybridLoaderApiRoute } from "~/services/routeBuilders/apiBuilder.server";

 // Schema for logs search parameters
84  apps/webapp/app/routes/api.v1.recall-logs.tsx  Normal file
@@ -0,0 +1,84 @@
import { type LoaderFunctionArgs, json } from "@remix-run/node";
import { z } from "zod";
import { prisma } from "~/db.server";
import { createHybridLoaderApiRoute } from "~/services/routeBuilders/apiBuilder.server";

// Schema for recall logs search parameters
const RecallLogsSearchParams = z.object({
  page: z.string().optional(),
  limit: z.string().optional(),
  query: z.string().optional(),
});

export const loader = createHybridLoaderApiRoute(
  {
    allowJWT: true,
    searchParams: RecallLogsSearchParams,
    corsStrategy: "all",
    findResource: async () => 1,
  },
  async ({ authentication, searchParams }) => {
    const page = parseInt(searchParams.page || "1");
    const limit = parseInt(searchParams.limit || "100");
    const query = searchParams.query;
    const skip = (page - 1) * limit;

    // Get user and workspace in one query
    const user = await prisma.user.findUnique({
      where: { id: authentication.userId },
      select: { Workspace: { select: { id: true } } },
    });

    if (!user?.Workspace) {
      throw new Response("Workspace not found", { status: 404 });
    }

    // Build where clause for filtering
    const whereClause: any = {
      workspaceId: user.Workspace.id,
      deleted: null,
    };

    if (query) {
      whereClause.query = {
        contains: query,
        mode: "insensitive",
      };
    }

    const [recallLogs, totalCount] = await Promise.all([
      prisma.recallLog.findMany({
        where: whereClause,
        select: {
          id: true,
          createdAt: true,
          accessType: true,
          query: true,
          targetType: true,
          targetId: true,
          searchMethod: true,
          resultCount: true,
          similarityScore: true,
          source: true,
        },
        orderBy: {
          createdAt: "desc",
        },
        skip,
        take: limit,
      }),

      prisma.recallLog.count({
        where: whereClause,
      }),
    ]);

    return json({
      recallLogs,
      totalCount,
      page,
      limit,
      hasMore: skip + recallLogs.length < totalCount,
    });
  },
);
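A hedged usage sketch for the new recall-logs loader; the path is inferred from the route file name, and the host, token, and query values are placeholders.

// Hypothetical call to the recall-logs loader added above.
const host = "https://core.heysol.ai";
const token = "<api-token>";

const params = new URLSearchParams({ page: "1", limit: "50", query: "atlas" });
const res = await fetch(`${host}/api/v1/recall-logs?${params}`, {
  headers: { Authorization: `Bearer ${token}` },
});

// Response shape: { recallLogs, totalCount, page, limit, hasMore }
const { recallLogs, hasMore } = await res.json();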
@@ -5,6 +5,7 @@ import {
 } from "~/services/routeBuilders/apiBuilder.server";
 import { SearchService } from "~/services/search.server";
 import { json } from "@remix-run/node";
+import { trackFeatureUsage } from "~/services/telemetry.server";

 export const SearchBodyRequest = z.object({
   query: z.string(),
@@ -20,6 +21,7 @@ export const SearchBodyRequest = z.object({
   scoreThreshold: z.number().optional(),
   minResults: z.number().optional(),
   adaptiveFiltering: z.boolean().optional(),
+  structured: z.boolean().default(true),
 });

 const searchService = new SearchService();
@@ -47,8 +49,13 @@ const { action, loader } = createHybridActionApiRoute(
         minResults: body.minResults,
         spaceIds: body.spaceIds,
         adaptiveFiltering: body.adaptiveFiltering,
+        structured: body.structured,
       },
     );

+    // Track search
+    trackFeatureUsage("search_performed", authentication.userId).catch(console.error);
+
     return json(results);
   },
 );
@@ -3,7 +3,7 @@ import { createHybridActionApiRoute } from "~/services/routeBuilders/apiBuilder.
 import { SpaceService } from "~/services/space.server";
 import { json } from "@remix-run/node";
 import { logger } from "~/services/logger.service";
-import { triggerSpaceAssignment } from "~/trigger/spaces/space-assignment";
+import { enqueueSpaceAssignment } from "~/lib/queue-adapter.server";

 // Schema for space ID parameter
 const SpaceParamsSchema = z.object({
@@ -31,7 +31,7 @@ const { loader, action } = createHybridActionApiRoute(

     // Trigger automatic episode assignment for the reset space
     try {
-      await triggerSpaceAssignment({
+      await enqueueSpaceAssignment({
         userId: userId,
         workspaceId: space.workspaceId,
         mode: "new_space",
@@ -1,8 +1,8 @@
 import { z } from "zod";
 import { createActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";
 import { json } from "@remix-run/node";
-import { triggerSpaceAssignment } from "~/trigger/spaces/space-assignment";
 import { prisma } from "~/db.server";
+import { enqueueSpaceAssignment } from "~/lib/queue-adapter.server";

 // Schema for manual assignment trigger
 const ManualAssignmentSchema = z.object({
@@ -38,7 +38,7 @@ const { action } = createActionApiRoute(
     let taskRun;

     // Direct LLM assignment trigger
-    taskRun = await triggerSpaceAssignment({
+    taskRun = await enqueueSpaceAssignment({
       userId,
       workspaceId: user?.Workspace?.id as string,
       mode: body.mode,
@@ -49,7 +49,7 @@ const { action } = createActionApiRoute(
     return json({
       success: true,
       message: `${body.mode} assignment task triggered successfully`,
-      taskId: taskRun.id,
       payload: {
         userId,
         mode: body.mode,
@@ -7,6 +7,10 @@ import { SpaceService } from "~/services/space.server";
 import { json } from "@remix-run/node";
 import { prisma } from "~/db.server";
 import { apiCors } from "~/utils/apiCors";
+import {
+  enqueueSpaceAssignment,
+  isTriggerDeployment,
+} from "~/lib/queue-adapter.server";

 const spaceService = new SpaceService();

@@ -40,6 +44,13 @@ const { action } = createHybridActionApiRoute(
       },
     });

+    if (!isTriggerDeployment()) {
+      return json(
+        { error: "Spaces don't work in non trigger deployment" },
+        { status: 400 },
+      );
+    }
+
     if (!user?.Workspace?.id) {
       throw new Error(
         "Workspace ID is required to create an ingestion queue entry.",
@@ -66,6 +77,14 @@ const { action } = createHybridActionApiRoute(
       workspaceId: user.Workspace.id,
     });

+    await enqueueSpaceAssignment({
+      userId: user.id,
+      workspaceId: user.Workspace.id,
+      mode: "new_space",
+      newSpaceId: space.id,
+      batchSize: 25, // Analyze recent statements for the new space
+    });
+
     return json({ space, success: true });
   }
70  apps/webapp/app/routes/api.v1.user.delete.tsx  Normal file
@@ -0,0 +1,70 @@
import { json } from "@remix-run/node";
import { deleteUser, getUserById } from "~/models/user.server";
import { sessionStorage } from "~/services/sessionStorage.server";
import { cancelSubscriptionImmediately } from "~/services/stripe.server";
import { isBillingEnabled } from "~/config/billing.server";
import { prisma } from "~/db.server";
import { createHybridActionApiRoute } from "~/services/routeBuilders/apiBuilder.server";

const { action, loader } = createHybridActionApiRoute(
  {
    corsStrategy: "all",
    allowJWT: true,
    method: "DELETE",
    authorization: {
      action: "delete",
    },
  },
  async ({ authentication, request }) => {
    try {
      const user = await getUserById(authentication.userId);

      if (!user || !user.Workspace) {
        throw new Error("No user or workspace found");
      }

      // If billing is enabled, cancel any active subscriptions
      if (isBillingEnabled()) {
        const subscription = await prisma.subscription.findUnique({
          where: { workspaceId: user?.Workspace?.id! },
        });

        if (subscription?.stripeSubscriptionId) {
          try {
            await cancelSubscriptionImmediately(
              subscription.stripeSubscriptionId,
            );
          } catch (error) {
            console.error("Failed to cancel Stripe subscription:", error);
            // Continue with deletion even if Stripe cancellation fails
          }
        }
      }

      // Delete the user and all associated data
      await deleteUser(user.id);

      // Destroy the session
      const session = await sessionStorage.getSession(
        request.headers.get("Cookie"),
      );

      return json(
        { success: true },
        {
          headers: {
            "Set-Cookie": await sessionStorage.destroySession(session),
          },
        },
      );
    } catch (error) {
      console.error("Error deleting user:", error);
      return json(
        { error: "Failed to delete account. Please try again." },
        { status: 500 },
      );
    }
  },
);

export { action, loader };
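A hedged usage sketch for the account-deletion endpoint; the path is inferred from the route file name (api.v1.user.delete.tsx), and the host and token are placeholders.

// Hypothetical call to the account-deletion endpoint added above.
const host = "https://core.heysol.ai";
const token = "<api-token>";

const res = await fetch(`${host}/api/v1/user/delete`, {
  method: "DELETE",
  headers: { Authorization: `Bearer ${token}` },
});

// On success the route returns { success: true } and clears the session via Set-Cookie.
const { success } = await res.json();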
@@ -1,35 +1,26 @@
-import {
-  type LoaderFunctionArgs,
-  type ActionFunctionArgs,
-} from "@remix-run/server-runtime";
-import { sort } from "fast-sort";
+import { type LoaderFunctionArgs } from "@remix-run/server-runtime";

-import { useParams, useRevalidator, useNavigate } from "@remix-run/react";
+import { useParams, useNavigate } from "@remix-run/react";
 import { requireUser, requireWorkpace } from "~/services/session.server";
-import {
-  getConversationAndHistory,
-  getCurrentConversationRun,
-  stopConversation,
-} from "~/services/conversation.server";
-import { type ConversationHistory } from "@core/database";
+import { getConversationAndHistory } from "~/services/conversation.server";
 import {
   ConversationItem,
   ConversationTextarea,
-  StreamingConversation,
 } from "~/components/conversation";
 import { useTypedLoaderData } from "remix-typedjson";
-import React from "react";
 import { ScrollAreaWithAutoScroll } from "~/components/use-auto-scroll";
 import { PageHeader } from "~/components/common/page-header";
 import { Plus } from "lucide-react";

-import { json } from "@remix-run/node";
-import { env } from "~/env.server";
+import { type UIMessage, useChat } from "@ai-sdk/react";
+import { DefaultChatTransport } from "ai";
+import { UserTypeEnum } from "@core/types";
+import React from "react";

 // Example loader accessing params
 export async function loader({ params, request }: LoaderFunctionArgs) {
   const user = await requireUser(request);
-  const workspace = await requireWorkpace(request);
   const conversation = await getConversationAndHistory(
     params.conversationId as string,
     user.id,
@@ -39,81 +30,38 @@
     throw new Error("No conversation found");
   }

-  const run = await getCurrentConversationRun(conversation.id, workspace.id);
-
-  return { conversation, run, apiURL: env.TRIGGER_API_URL };
-}
-
-// Example action accessing params
-export async function action({ params, request }: ActionFunctionArgs) {
-  if (request.method.toUpperCase() !== "POST") {
-    return new Response("Method Not Allowed", { status: 405 });
-  }
-  const workspace = await requireWorkpace(request);
-  // params.conversationId will be available here
-  const { conversationId } = params;
-
-  if (!conversationId) {
-    throw new Error("No conversation");
-  }
-
-  const result = await stopConversation(conversationId, workspace.id);
-  return json(result);
+  return { conversation };
 }

 // Accessing params in the component
 export default function SingleConversation() {
-  const { conversation, run, apiURL } = useTypedLoaderData<typeof loader>();
-  const conversationHistory = conversation.ConversationHistory;
-
-  const [conversationResponse, setConversationResponse] = React.useState<
-    { conversationHistoryId: string; id: string; token: string } | undefined
-  >(run);
-
-  const { conversationId } = useParams();
-  const revalidator = useRevalidator();
-
+  const { conversation } = useTypedLoaderData<typeof loader>();
   const navigate = useNavigate();
+  const { conversationId } = useParams();
+
+  const { sendMessage, messages, status, stop, regenerate } = useChat({
+    id: conversationId, // use the provided chat ID
+    messages: conversation.ConversationHistory.map(
+      (history) =>
+        ({
+          role: history.userType === UserTypeEnum.Agent ? "assistant" : "user",
+          parts: [{ text: history.message, type: "text" }],
+        }) as UIMessage,
+    ), // load initial messages
+    transport: new DefaultChatTransport({
+      api: "/api/v1/conversation",
+      prepareSendMessagesRequest({ messages, id }) {
+        return { body: { message: messages[messages.length - 1], id } };
+      },
+    }),
+  });
+  console.log("new", messages);

   React.useEffect(() => {
-    if (run) {
-      setConversationResponse(run);
+    if (messages.length === 1) {
+      regenerate();
     }
-  }, [run]);
-
-  const getConversations = () => {
-    const lastConversationHistoryId =
-      conversationResponse?.conversationHistoryId;
-
-    // First sort the conversation history by creation time
-    const sortedConversationHistory = sort(conversationHistory).asc(
-      (ch) => ch.createdAt,
-    );
-
-    const lastIndex = sortedConversationHistory.findIndex(
-      (item) => item.id === lastConversationHistoryId,
-    );
-
-    // Filter out any conversation history items that come after the lastConversationHistoryId
-    const filteredConversationHistory = lastConversationHistoryId
-      ? sortedConversationHistory.filter((_ch, currentIndex: number) => {
-          // Find the index of the last conversation history
-
-          // Only keep items that come before or are the last conversation history
-          return currentIndex <= lastIndex;
-        })
-      : sortedConversationHistory;
-
-    return (
-      <>
-        {filteredConversationHistory.map(
-          (ch: ConversationHistory, index: number) => {
-            return <ConversationItem key={index} conversationHistory={ch} />;
-          },
-        )}
-      </>
-    );
-  };
+  }, []);

   if (typeof window === "undefined") {
     return null;
@@ -140,31 +88,23 @@
       <div className="relative flex h-[calc(100vh_-_56px)] w-full flex-col items-center justify-center overflow-auto">
         <div className="flex h-[calc(100vh_-_80px)] w-full flex-col justify-end overflow-hidden">
           <ScrollAreaWithAutoScroll>
-            {getConversations()}
-            {conversationResponse && (
-              <StreamingConversation
-                runId={conversationResponse.id}
-                token={conversationResponse.token}
-                afterStreaming={() => {
-                  setConversationResponse(undefined);
-                  revalidator.revalidate();
-                }}
-                apiURL={apiURL}
-              />
-            )}
+            {messages.map((message: UIMessage, index: number) => {
+              return <ConversationItem key={index} message={message} />;
+            })}
           </ScrollAreaWithAutoScroll>

           <div className="flex w-full flex-col items-center">
-            <div className="w-full max-w-[97ch] px-1 pr-2">
-              {conversation?.status !== "need_approval" && (
-                <ConversationTextarea
-                  conversationId={conversationId as string}
+            <div className="w-full max-w-[80ch] px-1 pr-2">
+              <ConversationTextarea
+                className="bg-background-3 w-full border-1 border-gray-300"
+                isLoading={status === "streaming" || status === "submitted"}
|
||||||
className="bg-background-3 w-full border-1 border-gray-300"
|
onConversationCreated={(message) => {
|
||||||
isLoading={
|
if (message) {
|
||||||
!!conversationResponse || conversation?.status === "running"
|
sendMessage({ text: message });
|
||||||
}
|
}
|
||||||
/>
|
}}
|
||||||
)}
|
stop={() => stop()}
|
||||||
|
/>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
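Note on the new chat wiring above: DefaultChatTransport posts only the latest UIMessage plus the chat id to /api/v1/conversation, so the server is expected to rebuild the rest of the context itself. That route handler is not part of this excerpt, so the following is only a minimal sketch of what such an endpoint could look like, assuming AI SDK v5 and an OpenAI model; the model name, history handling, and lack of persistence are assumptions, not the repository's actual implementation.

// Hypothetical sketch only: not the repository's real /api/v1/conversation route.
import { type ActionFunctionArgs } from "@remix-run/node";
import { convertToModelMessages, streamText, type UIMessage } from "ai";
import { openai } from "@ai-sdk/openai";

export async function action({ request }: ActionFunctionArgs) {
  // DefaultChatTransport above sends { message, id }: the latest UIMessage
  // plus the chat id, rather than the whole message array.
  const { message, id } = (await request.json()) as {
    message: UIMessage;
    id: string;
  };

  // A real implementation would reload the prior history for `id` here and
  // prepend it; this sketch streams a reply to the latest message only.
  const result = streamText({
    model: openai("gpt-4o"), // assumed model choice
    messages: convertToModelMessages([message]),
  });

  // Returns a UI-message stream that useChat can consume on the client.
  return result.toUIMessageStreamResponse();
}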
@@ -4,7 +4,7 @@ import {
 } from "@remix-run/server-runtime";
 import { useTypedLoaderData } from "remix-typedjson";
 import { parse } from "@conform-to/zod";
+import { redirect, json } from "@remix-run/node";
 import {
   requireUser,
   requireUserId,
@@ -16,7 +16,7 @@ import {
   createConversation,
   CreateConversationSchema,
 } from "~/services/conversation.server";
-import { json } from "@remix-run/node";
 import { PageHeader } from "~/components/common/page-header";

 export async function loader({ request }: LoaderFunctionArgs) {
@@ -43,21 +43,19 @@ export async function action({ request }: ActionFunctionArgs) {

   const conversation = await createConversation(workspace?.id, userId, {
     message: submission.value.message,
-    title: submission.value.title,
-    conversationId: submission.value.conversationId,
+    title: submission.value.title ?? "Untitled",
   });

-  // Redirect to the conversation page after creation
-  // conversationId may be in different places depending on createConversation logic
+  // If conversationId exists in submission, return the conversation data (don't redirect)
+  if (submission.value.conversationId) {
+    return json({ conversation });
+  }
+
+  // For new conversations (no conversationId), redirect to the conversation page
   const conversationId = conversation?.conversationId;

   if (conversationId) {
-    return new Response(null, {
-      status: 302,
-      headers: {
-        Location: `/home/conversation/${conversationId}`,
-      },
-    });
+    return redirect(`/home/conversation/${conversationId}`);
   }

   // fallback: just return the conversation object
@@ -40,7 +40,7 @@ export default function InboxNotSelected() {
       <PageHeader
         title="Episode"
         showTrigger={false}
-        actionsNode={<LogOptions id={log.id} />}
+        actionsNode={<LogOptions id={log.id} status={log.status} />}
       />

       <LogDetails log={log as any} />
@@ -1,4 +1,4 @@
-import { useState } from "react";
+import { useEffect, useState } from "react";
 import { useLogs } from "~/hooks/use-logs";
 import { LogsFilters } from "~/components/logs/logs-filters";
 import { VirtualLogsList } from "~/components/logs/virtual-logs-list";
@@ -12,11 +12,13 @@ import {
 } from "~/components/ui/resizable";
 import { Outlet, useParams } from "@remix-run/react";
 import { cn } from "~/lib/utils";
+import { OnboardingModal } from "~/components/onboarding";

 export default function LogsAll() {
   const [selectedSource, setSelectedSource] = useState<string | undefined>();
   const [selectedStatus, setSelectedStatus] = useState<string | undefined>();
   const [selectedType, setSelectedType] = useState<string | undefined>();
+  const [onboarding, setOnboarding] = useState(false);

   const { logId } = useParams();

@@ -34,6 +36,12 @@ export default function LogsAll() {
     type: selectedType,
   });

+  useEffect(() => {
+    if (!isLoading && logs && logs.length === 1) {
+      setOnboarding(true);
+    }
+  }, [logs.length, isLoading]);
+
   return (
     <>
       <ResizablePanelGroup direction="horizontal">
@@ -117,6 +125,16 @@ export default function LogsAll() {
           <Outlet />
         </ResizablePanel>
       </ResizablePanelGroup>

+      <OnboardingModal
+        isOpen={onboarding}
+        onClose={() => {
+          setOnboarding(false);
+        }}
+        onComplete={() => {
+          setOnboarding(false);
+        }}
+      />
     </>
   );
 }
apps/webapp/app/routes/settings.account.tsx (new file)
@@ -0,0 +1,216 @@
|
import { json, type LoaderFunctionArgs } from "@remix-run/node";
|
||||||
|
import { useLoaderData, useFetcher, useNavigate } from "@remix-run/react";
|
||||||
|
import { requireUser } from "~/services/session.server";
|
||||||
|
import { Card } from "~/components/ui/card";
|
||||||
|
import { Button } from "~/components/ui/button";
|
||||||
|
import { Input } from "~/components/ui/input";
|
||||||
|
import { Label } from "~/components/ui/label";
|
||||||
|
import { AlertTriangle } from "lucide-react";
|
||||||
|
import { useState } from "react";
|
||||||
|
import {
|
||||||
|
AlertDialog,
|
||||||
|
AlertDialogAction,
|
||||||
|
AlertDialogCancel,
|
||||||
|
AlertDialogContent,
|
||||||
|
AlertDialogDescription,
|
||||||
|
AlertDialogFooter,
|
||||||
|
AlertDialogHeader,
|
||||||
|
AlertDialogTitle,
|
||||||
|
} from "~/components/ui/alert-dialog";
|
||||||
|
import { SettingSection } from "~/components/setting-section";
|
||||||
|
|
||||||
|
interface SuccessDataResponse {
|
||||||
|
success: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ErrorDataResponse {
|
||||||
|
error: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const loader = async ({ request }: LoaderFunctionArgs) => {
|
||||||
|
const user = await requireUser(request);
|
||||||
|
|
||||||
|
return json({
|
||||||
|
user,
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
export default function AccountSettings() {
|
||||||
|
const { user } = useLoaderData<typeof loader>();
|
||||||
|
const fetcher = useFetcher<SuccessDataResponse | ErrorDataResponse>();
|
||||||
|
const navigate = useNavigate();
|
||||||
|
const [showDeleteDialog, setShowDeleteDialog] = useState(false);
|
||||||
|
const [confirmText, setConfirmText] = useState("");
|
||||||
|
const isDeleting = fetcher.state === "submitting";
|
||||||
|
|
||||||
|
const handleDeleteAccount = () => {
|
||||||
|
fetcher.submit(
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
method: "DELETE",
|
||||||
|
action: "/api/v1/user/delete",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Redirect to login after successful deletion
|
||||||
|
if (fetcher.data && "success" in fetcher.data && fetcher.data.success) {
|
||||||
|
setTimeout(() => {
|
||||||
|
navigate("/login");
|
||||||
|
}, 1000);
|
||||||
|
}
|
||||||
|
|
||||||
|
const canDelete = confirmText === user.email;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="mx-auto flex w-3xl flex-col gap-4 px-4 py-6">
|
||||||
|
<SettingSection
|
||||||
|
title="Account Settings"
|
||||||
|
description="Manage your account information and preferences"
|
||||||
|
>
|
||||||
|
<>
|
||||||
|
{/* Account Information */}
|
||||||
|
<div className="mb-8">
|
||||||
|
<h2 className="mb-4 text-lg font-semibold">Account Information</h2>
|
||||||
|
<Card className="p-6">
|
||||||
|
<div className="space-y-4">
|
||||||
|
<div>
|
||||||
|
<Label className="text-muted-foreground text-sm">Email</Label>
|
||||||
|
<p className="text-base font-medium">{user.email}</p>
|
||||||
|
</div>
|
||||||
|
{user.name && (
|
||||||
|
<div>
|
||||||
|
<Label className="text-muted-foreground text-sm">
|
||||||
|
Name
|
||||||
|
</Label>
|
||||||
|
<p className="text-base font-medium">{user.name}</p>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{user.displayName && (
|
||||||
|
<div>
|
||||||
|
<Label className="text-muted-foreground text-sm">
|
||||||
|
Display Name
|
||||||
|
</Label>
|
||||||
|
<p className="text-base font-medium">{user.displayName}</p>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
<div>
|
||||||
|
<Label className="text-muted-foreground text-sm">
|
||||||
|
Account Created
|
||||||
|
</Label>
|
||||||
|
<p className="text-base font-medium">
|
||||||
|
{new Date(user.createdAt).toLocaleDateString()}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</Card>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Danger Zone */}
|
||||||
|
<div className="mb-8">
|
||||||
|
<h2 className="mb-4 text-lg font-semibold text-red-600 dark:text-red-400">
|
||||||
|
Danger Zone
|
||||||
|
</h2>
|
||||||
|
<Card className="p-6">
|
||||||
|
<div className="flex items-start gap-3">
|
||||||
|
<AlertTriangle className="mt-1 h-5 w-5 text-red-600 dark:text-red-400" />
|
||||||
|
<div className="flex-1">
|
||||||
|
<h3 className="font-semibold text-red-900 dark:text-red-100">
|
||||||
|
Delete Account
|
||||||
|
</h3>
|
||||||
|
<p className="mb-4 text-sm text-red-700 dark:text-red-300">
|
||||||
|
Permanently delete your account and all associated data.
|
||||||
|
This action cannot be undone.
|
||||||
|
</p>
|
||||||
|
<ul className="mb-4 list-inside list-disc space-y-1 text-sm">
|
||||||
|
<li>All your memories and conversations will be deleted</li>
|
||||||
|
<li>All integration connections will be removed</li>
|
||||||
|
<li>All API keys and webhooks will be revoked</li>
|
||||||
|
<li>
|
||||||
|
Your workspace and all its data will be permanently lost
|
||||||
|
</li>
|
||||||
|
<li>Active subscriptions will be cancelled immediately</li>
|
||||||
|
</ul>
|
||||||
|
<Button
|
||||||
|
variant="destructive"
|
||||||
|
onClick={() => setShowDeleteDialog(true)}
|
||||||
|
disabled={isDeleting}
|
||||||
|
>
|
||||||
|
Delete My Account
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</Card>
|
||||||
|
</div>
|
||||||
|
</>
|
||||||
|
</SettingSection>
|
||||||
|
|
||||||
|
{/* Delete Confirmation Dialog */}
|
||||||
|
<AlertDialog open={showDeleteDialog} onOpenChange={setShowDeleteDialog}>
|
||||||
|
<AlertDialogContent>
|
||||||
|
<AlertDialogHeader>
|
||||||
|
<AlertDialogTitle>Are you absolutely sure?</AlertDialogTitle>
|
||||||
|
<AlertDialogDescription asChild>
|
||||||
|
<div className="space-y-4">
|
||||||
|
<p>
|
||||||
|
This action <strong>cannot be undone</strong>. This will
|
||||||
|
permanently delete your account and remove all your data from
|
||||||
|
our servers.
|
||||||
|
</p>
|
||||||
|
<div>
|
||||||
|
<Label
|
||||||
|
htmlFor="confirm-email"
|
||||||
|
className="text-sm font-medium"
|
||||||
|
>
|
||||||
|
To confirm, type your email address:{" "}
|
||||||
|
<span className="font-mono">{user.email}</span>
|
||||||
|
</Label>
|
||||||
|
<Input
|
||||||
|
id="confirm-email"
|
||||||
|
type="text"
|
||||||
|
value={confirmText}
|
||||||
|
onChange={(e) => setConfirmText(e.target.value)}
|
||||||
|
placeholder="Enter your email"
|
||||||
|
className="mt-2"
|
||||||
|
autoComplete="off"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</AlertDialogDescription>
|
||||||
|
</AlertDialogHeader>
|
||||||
|
<AlertDialogFooter>
|
||||||
|
<AlertDialogCancel
|
||||||
|
onClick={() => {
|
||||||
|
setConfirmText("");
|
||||||
|
}}
|
||||||
|
disabled={isDeleting}
|
||||||
|
>
|
||||||
|
Cancel
|
||||||
|
</AlertDialogCancel>
|
||||||
|
<AlertDialogAction
|
||||||
|
onClick={handleDeleteAccount}
|
||||||
|
disabled={!canDelete || isDeleting}
|
||||||
|
className="bg-red-600 text-white hover:bg-red-700"
|
||||||
|
>
|
||||||
|
{isDeleting ? "Deleting..." : "Delete Account Permanently"}
|
||||||
|
</AlertDialogAction>
|
||||||
|
</AlertDialogFooter>
|
||||||
|
</AlertDialogContent>
|
||||||
|
</AlertDialog>
|
||||||
|
|
||||||
|
{/* Success Message */}
|
||||||
|
{fetcher.data && "success" in fetcher.data && fetcher.data.success && (
|
||||||
|
<div className="fixed right-4 bottom-4 rounded-md bg-green-600 p-4 text-white shadow-lg">
|
||||||
|
Account deleted successfully. Redirecting...
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Error Message */}
|
||||||
|
{fetcher.data && "error" in fetcher.data && (
|
||||||
|
<div className="fixed right-4 bottom-4 rounded-md bg-red-600 p-4 text-white shadow-lg">
|
||||||
|
{fetcher.data.error}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
@@ -36,6 +36,7 @@ import {
 } from "~/components/ui/alert-dialog";
 import { prisma } from "~/db.server";
 import { isBillingEnabled } from "~/config/billing.server";
+import { SettingSection } from "~/components/setting-section";

 export const loader = async ({ request }: LoaderFunctionArgs) => {
   const user = await requireUser(request);
@ -230,218 +231,233 @@ export default function BillingSettings() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="p-8">
|
<div className="mx-auto flex w-3xl flex-col gap-4 px-4 py-6">
|
||||||
{/* Header */}
|
<SettingSection
|
||||||
<div className="mb-8">
|
title="Billing"
|
||||||
<h1 className="text-2xl font-bold">Billing</h1>
|
description=" Manage your subscription, usage, and billing history"
|
||||||
<p className="text-muted-foreground">
|
>
|
||||||
Manage your subscription, usage, and billing history
|
<>
|
||||||
</p>
|
{/* Usage Section */}
|
||||||
</div>
|
<div className="mb-8">
|
||||||
|
<h2 className="mb-4 text-lg font-semibold">Current Usage</h2>
|
||||||
|
|
||||||
{/* Usage Section */}
|
<div className="grid gap-4 md:grid-cols-3">
|
||||||
<div className="mb-8">
|
{/* Credits Card */}
|
||||||
<h2 className="mb-4 text-lg font-semibold">Current Usage</h2>
|
<Card className="p-6">
|
||||||
|
<div className="mb-2 flex items-center justify-between">
|
||||||
<div className="grid gap-4 md:grid-cols-3">
|
<span className="text-muted-foreground text-sm">Credits</span>
|
||||||
{/* Credits Card */}
|
<CreditCard className="text-muted-foreground h-4 w-4" />
|
||||||
<Card className="p-6">
|
</div>
|
||||||
<div className="mb-2 flex items-center justify-between">
|
<div className="mb-2">
|
||||||
<span className="text-muted-foreground text-sm">Credits</span>
|
<span className="text-3xl font-bold">
|
||||||
<CreditCard className="text-muted-foreground h-4 w-4" />
|
{usageSummary.credits.available}
|
||||||
</div>
|
</span>
|
||||||
<div className="mb-2">
|
<span className="text-muted-foreground">
|
||||||
<span className="text-3xl font-bold">
|
{" "}
|
||||||
{usageSummary.credits.available}
|
/ {usageSummary.credits.monthly}
|
||||||
</span>
|
</span>
|
||||||
<span className="text-muted-foreground">
|
</div>
|
||||||
{" "}
|
<Progress
|
||||||
/ {usageSummary.credits.monthly}
|
segments={[
|
||||||
</span>
|
{ value: 100 - usageSummary.credits.percentageUsed },
|
||||||
</div>
|
]}
|
||||||
<Progress
|
className="mb-2"
|
||||||
segments={[{ value: 100 - usageSummary.credits.percentageUsed }]}
|
color="#c15e50"
|
||||||
className="mb-2"
|
/>
|
||||||
color="#c15e50"
|
<p className="text-muted-foreground text-xs">
|
||||||
/>
|
{usageSummary.credits.percentageUsed}% used this period
|
||||||
<p className="text-muted-foreground text-xs">
|
|
||||||
{usageSummary.credits.percentageUsed}% used this period
|
|
||||||
</p>
|
|
||||||
</Card>
|
|
||||||
|
|
||||||
{/* Usage Breakdown */}
|
|
||||||
<Card className="p-6">
|
|
||||||
<div className="mb-2 flex items-center justify-between">
|
|
||||||
<span className="text-muted-foreground text-sm">
|
|
||||||
Usage Breakdown
|
|
||||||
</span>
|
|
||||||
<TrendingUp className="text-muted-foreground h-4 w-4" />
|
|
||||||
</div>
|
|
||||||
<div className="space-y-2">
|
|
||||||
<div className="flex justify-between text-sm">
|
|
||||||
<span className="text-muted-foreground">Facts</span>
|
|
||||||
<span className="font-medium">
|
|
||||||
{usageSummary.usage.episodes}
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
<div className="flex justify-between text-sm">
|
|
||||||
<span className="text-muted-foreground">Searches</span>
|
|
||||||
<span className="font-medium">
|
|
||||||
{usageSummary.usage.searches}
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
<div className="flex justify-between text-sm">
|
|
||||||
<span className="text-muted-foreground">Chat</span>
|
|
||||||
<span className="font-medium">{usageSummary.usage.chat}</span>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</Card>
|
|
||||||
|
|
||||||
{/* Billing Cycle */}
|
|
||||||
<Card className="p-6">
|
|
||||||
<div className="mb-2 flex items-center justify-between">
|
|
||||||
<span className="text-muted-foreground text-sm">
|
|
||||||
Billing Cycle
|
|
||||||
</span>
|
|
||||||
<Calendar className="text-muted-foreground h-4 w-4" />
|
|
||||||
</div>
|
|
||||||
<div className="mb-2">
|
|
||||||
<span className="text-3xl font-bold">
|
|
||||||
{usageSummary.billingCycle.daysRemaining}
|
|
||||||
</span>
|
|
||||||
<span className="text-muted-foreground"> days left</span>
|
|
||||||
</div>
|
|
||||||
<p className="text-muted-foreground text-xs">
|
|
||||||
Resets on{" "}
|
|
||||||
{new Date(usageSummary.billingCycle.end).toLocaleDateString()}
|
|
||||||
</p>
|
|
||||||
</Card>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Overage Warning */}
|
|
||||||
{usageSummary.credits.overage > 0 && (
|
|
||||||
<Card className="mt-4 border-orange-500 bg-orange-50 p-4 dark:bg-orange-950">
|
|
||||||
<div className="flex items-start gap-3">
|
|
||||||
<AlertCircle className="h-5 w-5 text-orange-600 dark:text-orange-400" />
|
|
||||||
<div>
|
|
||||||
<h3 className="font-semibold text-orange-900 dark:text-orange-100">
|
|
||||||
Overage Usage Detected
|
|
||||||
</h3>
|
|
||||||
<p className="text-sm text-orange-700 dark:text-orange-300">
|
|
||||||
You've used {usageSummary.credits.overage} additional credits
|
|
||||||
beyond your monthly allocation.
|
|
||||||
{usageSummary.overage.enabled &&
|
|
||||||
usageSummary.overage.pricePerCredit && (
|
|
||||||
<>
|
|
||||||
{" "}
|
|
||||||
This will cost $
|
|
||||||
{(
|
|
||||||
usageSummary.credits.overage *
|
|
||||||
usageSummary.overage.pricePerCredit
|
|
||||||
).toFixed(2)}{" "}
|
|
||||||
extra this month.
|
|
||||||
</>
|
|
||||||
)}
|
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</Card>
|
||||||
</div>
|
|
||||||
</Card>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Plan Section */}
|
{/* Usage Breakdown */}
|
||||||
<div className="mb-8">
|
<Card className="p-6">
|
||||||
<div className="mb-4 flex items-center justify-between">
|
<div className="mb-2 flex items-center justify-between">
|
||||||
<h2 className="text-lg font-semibold">Plan</h2>
|
<span className="text-muted-foreground text-sm">
|
||||||
<Button variant="secondary" onClick={() => setShowPlansModal(true)}>
|
Usage Breakdown
|
||||||
View All Plans
|
</span>
|
||||||
</Button>
|
<TrendingUp className="text-muted-foreground h-4 w-4" />
|
||||||
</div>
|
</div>
|
||||||
|
<div className="space-y-2">
|
||||||
<Card className="p-6">
|
<div className="flex justify-between text-sm">
|
||||||
<div className="flex items-center justify-between">
|
<span className="text-muted-foreground">Facts</span>
|
||||||
<div>
|
<span className="font-medium">
|
||||||
<div className="mb-2 flex items-center gap-2">
|
{usageSummary.usage.episodes}
|
||||||
<h3 className="text-xl font-bold">{usageSummary.plan.name}</h3>
|
</span>
|
||||||
<Badge
|
|
||||||
variant={
|
|
||||||
usageSummary.plan.type === "FREE" ? "secondary" : "default"
|
|
||||||
}
|
|
||||||
className="rounded"
|
|
||||||
>
|
|
||||||
{usageSummary.plan.type}
|
|
||||||
</Badge>
|
|
||||||
</div>
|
|
||||||
<p className="text-muted-foreground text-sm">
|
|
||||||
{usageSummary.credits.monthly} credits/month
|
|
||||||
{usageSummary.overage.enabled && (
|
|
||||||
<> + ${usageSummary.overage.pricePerCredit}/credit overage</>
|
|
||||||
)}
|
|
||||||
</p>
|
|
||||||
{subscription?.status === "CANCELED" &&
|
|
||||||
subscription.planType !== "FREE" && (
|
|
||||||
<div className="mt-3 flex items-start gap-2 rounded-md bg-orange-50 p-3 dark:bg-orange-950">
|
|
||||||
<AlertCircle className="mt-0.5 h-4 w-4 text-orange-600 dark:text-orange-400" />
|
|
||||||
<p className="text-sm text-orange-700 dark:text-orange-300">
|
|
||||||
Downgrading to FREE plan on{" "}
|
|
||||||
<strong>
|
|
||||||
{new Date(
|
|
||||||
subscription.currentPeriodEnd,
|
|
||||||
).toLocaleDateString()}
|
|
||||||
</strong>
|
|
||||||
. Your current credits and plan will remain active until
|
|
||||||
then.
|
|
||||||
</p>
|
|
||||||
</div>
|
</div>
|
||||||
)}
|
<div className="flex justify-between text-sm">
|
||||||
|
<span className="text-muted-foreground">Searches</span>
|
||||||
|
<span className="font-medium">
|
||||||
|
{usageSummary.usage.searches}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
<div className="flex justify-between text-sm">
|
||||||
|
<span className="text-muted-foreground">Chat</span>
|
||||||
|
<span className="font-medium">
|
||||||
|
{usageSummary.usage.chat}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
{/* Billing Cycle */}
|
||||||
|
<Card className="p-6">
|
||||||
|
<div className="mb-2 flex items-center justify-between">
|
||||||
|
<span className="text-muted-foreground text-sm">
|
||||||
|
Billing Cycle
|
||||||
|
</span>
|
||||||
|
<Calendar className="text-muted-foreground h-4 w-4" />
|
||||||
|
</div>
|
||||||
|
<div className="mb-2">
|
||||||
|
<span className="text-3xl font-bold">
|
||||||
|
{usageSummary.billingCycle.daysRemaining}
|
||||||
|
</span>
|
||||||
|
<span className="text-muted-foreground"> days left</span>
|
||||||
|
</div>
|
||||||
|
<p className="text-muted-foreground text-xs">
|
||||||
|
Resets on{" "}
|
||||||
|
{new Date(usageSummary.billingCycle.end).toLocaleDateString()}
|
||||||
|
</p>
|
||||||
|
</Card>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
|
||||||
</Card>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Invoices Section */}
|
{/* Overage Warning */}
|
||||||
<div className="mb-8">
|
{usageSummary.credits.overage > 0 && (
|
||||||
<h2 className="mb-4 text-lg font-semibold">Invoices</h2>
|
<Card className="mt-4 border-orange-500 bg-orange-50 p-4 dark:bg-orange-950">
|
||||||
|
<div className="flex items-start gap-3">
|
||||||
{billingHistory.length === 0 ? (
|
<AlertCircle className="h-5 w-5 text-orange-600 dark:text-orange-400" />
|
||||||
<Card className="p-6">
|
|
||||||
<p className="text-muted-foreground text-center">No invoices yet</p>
|
|
||||||
</Card>
|
|
||||||
) : (
|
|
||||||
<Card>
|
|
||||||
<div className="divide-y">
|
|
||||||
{billingHistory.map((invoice) => (
|
|
||||||
<div
|
|
||||||
key={invoice.id}
|
|
||||||
className="flex items-center justify-between p-4"
|
|
||||||
>
|
|
||||||
<div>
|
<div>
|
||||||
<p className="font-medium">
|
<h3 className="font-semibold text-orange-900 dark:text-orange-100">
|
||||||
{new Date(invoice.periodStart).toLocaleDateString()} -{" "}
|
Overage Usage Detected
|
||||||
{new Date(invoice.periodEnd).toLocaleDateString()}
|
</h3>
|
||||||
|
<p className="text-sm text-orange-700 dark:text-orange-300">
|
||||||
|
You've used {usageSummary.credits.overage} additional
|
||||||
|
credits beyond your monthly allocation.
|
||||||
|
{usageSummary.overage.enabled &&
|
||||||
|
usageSummary.overage.pricePerCredit && (
|
||||||
|
<>
|
||||||
|
{" "}
|
||||||
|
This will cost $
|
||||||
|
{(
|
||||||
|
usageSummary.credits.overage *
|
||||||
|
usageSummary.overage.pricePerCredit
|
||||||
|
).toFixed(2)}{" "}
|
||||||
|
extra this month.
|
||||||
|
</>
|
||||||
|
)}
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
<div className="text-right">
|
</div>
|
||||||
<p className="font-bold">
|
</Card>
|
||||||
${invoice.totalAmount.toFixed(2)}
|
)}
|
||||||
</p>
|
</div>
|
||||||
|
|
||||||
|
{/* Plan Section */}
|
||||||
|
<div className="mb-8">
|
||||||
|
<div className="mb-4 flex items-center justify-between">
|
||||||
|
<h2 className="text-lg font-semibold">Plan</h2>
|
||||||
|
<Button
|
||||||
|
variant="secondary"
|
||||||
|
onClick={() => setShowPlansModal(true)}
|
||||||
|
>
|
||||||
|
View All Plans
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<Card className="p-6">
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<div>
|
||||||
|
<div className="mb-2 flex items-center gap-2">
|
||||||
|
<h3 className="text-xl font-bold">
|
||||||
|
{usageSummary.plan.name}
|
||||||
|
</h3>
|
||||||
<Badge
|
<Badge
|
||||||
variant={
|
variant={
|
||||||
invoice.stripePaymentStatus === "paid"
|
usageSummary.plan.type === "FREE"
|
||||||
? "default"
|
? "secondary"
|
||||||
: "destructive"
|
: "default"
|
||||||
}
|
}
|
||||||
className="rounded"
|
className="rounded"
|
||||||
>
|
>
|
||||||
{invoice.stripePaymentStatus || "pending"}
|
{usageSummary.plan.type}
|
||||||
</Badge>
|
</Badge>
|
||||||
</div>
|
</div>
|
||||||
|
<p className="text-muted-foreground text-sm">
|
||||||
|
{usageSummary.credits.monthly} credits/month
|
||||||
|
{usageSummary.overage.enabled && (
|
||||||
|
<>
|
||||||
|
{" "}
|
||||||
|
+ ${usageSummary.overage.pricePerCredit}/credit overage
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</p>
|
||||||
|
{subscription?.status === "CANCELED" &&
|
||||||
|
subscription.planType !== "FREE" && (
|
||||||
|
<div className="mt-3 flex items-start gap-2 rounded-md bg-orange-50 p-3 dark:bg-orange-950">
|
||||||
|
<AlertCircle className="mt-0.5 h-4 w-4 text-orange-600 dark:text-orange-400" />
|
||||||
|
<p className="text-sm text-orange-700 dark:text-orange-300">
|
||||||
|
Downgrading to FREE plan on{" "}
|
||||||
|
<strong>
|
||||||
|
{new Date(
|
||||||
|
subscription.currentPeriodEnd,
|
||||||
|
).toLocaleDateString()}
|
||||||
|
</strong>
|
||||||
|
. Your current credits and plan will remain active
|
||||||
|
until then.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
))}
|
</div>
|
||||||
</div>
|
</Card>
|
||||||
</Card>
|
</div>
|
||||||
)}
|
|
||||||
</div>
|
{/* Invoices Section */}
|
||||||
|
<div className="mb-8">
|
||||||
|
<h2 className="mb-4 text-lg font-semibold">Invoices</h2>
|
||||||
|
|
||||||
|
{billingHistory.length === 0 ? (
|
||||||
|
<Card className="p-6">
|
||||||
|
<p className="text-muted-foreground text-center">
|
||||||
|
No invoices yet
|
||||||
|
</p>
|
||||||
|
</Card>
|
||||||
|
) : (
|
||||||
|
<Card>
|
||||||
|
<div className="divide-y">
|
||||||
|
{billingHistory.map((invoice) => (
|
||||||
|
<div
|
||||||
|
key={invoice.id}
|
||||||
|
className="flex items-center justify-between p-4"
|
||||||
|
>
|
||||||
|
<div>
|
||||||
|
<p className="font-medium">
|
||||||
|
{new Date(invoice.periodStart).toLocaleDateString()} -{" "}
|
||||||
|
{new Date(invoice.periodEnd).toLocaleDateString()}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div className="text-right">
|
||||||
|
<p className="font-bold">
|
||||||
|
${invoice.totalAmount.toFixed(2)}
|
||||||
|
</p>
|
||||||
|
<Badge
|
||||||
|
variant={
|
||||||
|
invoice.stripePaymentStatus === "paid"
|
||||||
|
? "default"
|
||||||
|
: "destructive"
|
||||||
|
}
|
||||||
|
className="rounded"
|
||||||
|
>
|
||||||
|
{invoice.stripePaymentStatus || "pending"}
|
||||||
|
</Badge>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</Card>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</>
|
||||||
|
</SettingSection>
|
||||||
|
|
||||||
{/* Plans Modal */}
|
{/* Plans Modal */}
|
||||||
<Dialog open={showPlansModal} onOpenChange={setShowPlansModal}>
|
<Dialog open={showPlansModal} onOpenChange={setShowPlansModal}>
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
-import { ArrowLeft, Code, Webhook, Cable, CreditCard } from "lucide-react";
+import { ArrowLeft, Code, Webhook, Cable, CreditCard, User } from "lucide-react";

 import {
   Sidebar,
@@ -41,6 +41,7 @@ export default function Settings() {
   const data = {
     nav: [
       // { name: "Workspace", icon: Building },
+      { name: "Account", icon: User },
       { name: "Billing", icon: CreditCard },
       { name: "API", icon: Code },
       { name: "Webhooks", icon: Webhook },
apps/webapp/app/services/bertTopicAnalysis.server.ts (new file)
@@ -0,0 +1,107 @@
import { prisma } from "~/trigger/utils/prisma";
import { logger } from "~/services/logger.service";
import { runQuery } from "~/lib/neo4j.server";

interface WorkspaceMetadata {
  lastTopicAnalysisAt?: string;
  [key: string]: any;
}

/**
 * Check if we should trigger a BERT topic analysis for this workspace
 * Criteria: 20+ new episodes since last analysis (or no previous analysis)
 */
export async function shouldTriggerTopicAnalysis(
  userId: string,
  workspaceId: string,
): Promise<boolean> {
  try {
    // Get workspace metadata
    const workspace = await prisma.workspace.findUnique({
      where: { id: workspaceId },
      select: { metadata: true },
    });

    if (!workspace) {
      logger.warn(`Workspace not found: ${workspaceId}`);
      return false;
    }

    const metadata = (workspace.metadata || {}) as WorkspaceMetadata;
    const lastAnalysisAt = metadata.lastTopicAnalysisAt;

    // Count episodes since last analysis
    const query = lastAnalysisAt
      ? `
        MATCH (e:Episode {userId: $userId})
        WHERE e.createdAt > datetime($lastAnalysisAt)
        RETURN count(e) as newEpisodeCount
      `
      : `
        MATCH (e:Episode {userId: $userId})
        RETURN count(e) as totalEpisodeCount
      `;

    const result = await runQuery(query, {
      userId,
      lastAnalysisAt,
    });

    const episodeCount = lastAnalysisAt
      ? result[0]?.get("newEpisodeCount")?.toNumber() || 0
      : result[0]?.get("totalEpisodeCount")?.toNumber() || 0;

    logger.info(
      `[Topic Analysis Check] User: ${userId}, New episodes: ${episodeCount}, Last analysis: ${lastAnalysisAt || "never"}`,
    );

    // Trigger if 20+ new episodes
    return episodeCount >= 20;
  } catch (error) {
    logger.error(
      `[Topic Analysis Check] Error checking episode count:`,
      error,
    );
    return false;
  }
}

/**
 * Update workspace metadata with last topic analysis timestamp
 */
export async function updateLastTopicAnalysisTime(
  workspaceId: string,
): Promise<void> {
  try {
    const workspace = await prisma.workspace.findUnique({
      where: { id: workspaceId },
      select: { metadata: true },
    });

    if (!workspace) {
      logger.warn(`Workspace not found: ${workspaceId}`);
      return;
    }

    const metadata = (workspace.metadata || {}) as WorkspaceMetadata;

    await prisma.workspace.update({
      where: { id: workspaceId },
      data: {
        metadata: {
          ...metadata,
          lastTopicAnalysisAt: new Date().toISOString(),
        },
      },
    });

    logger.info(
      `[Topic Analysis] Updated last analysis timestamp for workspace: ${workspaceId}`,
    );
  } catch (error) {
    logger.error(
      `[Topic Analysis] Error updating last analysis timestamp:`,
      error,
    );
  }
}
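The two exports above are meant to compose: check the 20-episode threshold, run the analysis, then record the timestamp so the next check counts from this run. A hypothetical caller could look like the sketch below; runTopicAnalysis is a placeholder for whatever task actually performs the BERT analysis, which is not defined in this excerpt.

// Hypothetical caller, for illustration only.
import {
  shouldTriggerTopicAnalysis,
  updateLastTopicAnalysisTime,
} from "~/services/bertTopicAnalysis.server";

export async function maybeRunTopicAnalysis(
  userId: string,
  workspaceId: string,
  runTopicAnalysis: (userId: string) => Promise<void>, // placeholder task
) {
  // Only fires once 20+ episodes have accumulated since the last recorded run.
  if (await shouldTriggerTopicAnalysis(userId, workspaceId)) {
    await runTopicAnalysis(userId);
    // Record the run so the next check counts new episodes from here.
    await updateLastTopicAnalysisTime(workspaceId);
  }
}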
@@ -1,11 +1,9 @@
 import { UserTypeEnum } from "@core/types";

-import { auth, runs, tasks } from "@trigger.dev/sdk/v3";
 import { prisma } from "~/db.server";
-import { createConversationTitle } from "~/trigger/conversation/create-conversation-title";

 import { z } from "zod";
-import { type ConversationHistory } from "@prisma/client";
+import { trackFeatureUsage } from "~/services/telemetry.server";

 export const CreateConversationSchema = z.object({
   message: z.string(),
@ -44,20 +42,10 @@ export async function createConversation(
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
const context = await getConversationContext(conversationHistory.id);
|
// Track conversation message
|
||||||
const handler = await tasks.trigger(
|
trackFeatureUsage("conversation_message_sent", userId).catch(console.error);
|
||||||
"chat",
|
|
||||||
{
|
|
||||||
conversationHistoryId: conversationHistory.id,
|
|
||||||
conversationId: conversationHistory.conversation.id,
|
|
||||||
context,
|
|
||||||
},
|
|
||||||
{ tags: [conversationHistory.id, workspaceId, conversationId] },
|
|
||||||
);
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
id: handler.id,
|
|
||||||
token: handler.publicAccessToken,
|
|
||||||
conversationId: conversationHistory.conversation.id,
|
conversationId: conversationHistory.conversation.id,
|
||||||
conversationHistoryId: conversationHistory.id,
|
conversationHistoryId: conversationHistory.id,
|
||||||
};
|
};
|
||||||
@ -84,40 +72,20 @@ export async function createConversation(
|
|||||||
});
|
});
|
||||||
|
|
||||||
const conversationHistory = conversation.ConversationHistory[0];
|
const conversationHistory = conversation.ConversationHistory[0];
|
||||||
const context = await getConversationContext(conversationHistory.id);
|
|
||||||
|
|
||||||
// Trigger conversation title task
|
// Track new conversation creation
|
||||||
await tasks.trigger<typeof createConversationTitle>(
|
trackFeatureUsage("conversation_created", userId).catch(console.error);
|
||||||
createConversationTitle.id,
|
|
||||||
{
|
|
||||||
conversationId: conversation.id,
|
|
||||||
message: conversationData.message,
|
|
||||||
},
|
|
||||||
{ tags: [conversation.id, workspaceId] },
|
|
||||||
);
|
|
||||||
|
|
||||||
const handler = await tasks.trigger(
|
|
||||||
"chat",
|
|
||||||
{
|
|
||||||
conversationHistoryId: conversationHistory.id,
|
|
||||||
conversationId: conversation.id,
|
|
||||||
context,
|
|
||||||
},
|
|
||||||
{ tags: [conversationHistory.id, workspaceId, conversation.id] },
|
|
||||||
);
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
id: handler.id,
|
|
||||||
token: handler.publicAccessToken,
|
|
||||||
conversationId: conversation.id,
|
conversationId: conversation.id,
|
||||||
conversationHistoryId: conversationHistory.id,
|
conversationHistoryId: conversationHistory.id,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get a conversation by ID
|
// Get a conversation by ID
|
||||||
export async function getConversation(conversationId: string) {
|
export async function getConversation(conversationId: string, userId: string) {
|
||||||
return prisma.conversation.findUnique({
|
return prisma.conversation.findUnique({
|
||||||
where: { id: conversationId },
|
where: { id: conversationId, userId },
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -139,135 +107,6 @@ export async function readConversation(conversationId: string) {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function getCurrentConversationRun(
|
|
||||||
conversationId: string,
|
|
||||||
workspaceId: string,
|
|
||||||
) {
|
|
||||||
const conversationHistory = await prisma.conversationHistory.findFirst({
|
|
||||||
where: {
|
|
||||||
conversationId,
|
|
||||||
conversation: {
|
|
||||||
workspaceId,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
orderBy: {
|
|
||||||
updatedAt: "desc",
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!conversationHistory) {
|
|
||||||
throw new Error("No run found");
|
|
||||||
}
|
|
||||||
|
|
||||||
const response = await runs.list({
|
|
||||||
tag: [conversationId, conversationHistory.id],
|
|
||||||
status: ["QUEUED", "EXECUTING"],
|
|
||||||
limit: 1,
|
|
||||||
});
|
|
||||||
|
|
||||||
const run = response.data[0];
|
|
||||||
if (!run) {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
const publicToken = await auth.createPublicToken({
|
|
||||||
scopes: {
|
|
||||||
read: {
|
|
||||||
runs: [run.id],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
id: run.id,
|
|
||||||
token: publicToken,
|
|
||||||
conversationId,
|
|
||||||
conversationHistoryId: conversationHistory.id,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function stopConversation(
|
|
||||||
conversationId: string,
|
|
||||||
workspaceId: string,
|
|
||||||
) {
|
|
||||||
const conversationHistory = await prisma.conversationHistory.findFirst({
|
|
||||||
where: {
|
|
||||||
conversationId,
|
|
||||||
conversation: {
|
|
||||||
workspaceId,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
orderBy: {
|
|
||||||
updatedAt: "desc",
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!conversationHistory) {
|
|
||||||
throw new Error("No run found");
|
|
||||||
}
|
|
||||||
|
|
||||||
const response = await runs.list({
|
|
||||||
tag: [conversationId, conversationHistory.id],
|
|
||||||
status: ["QUEUED", "EXECUTING"],
|
|
||||||
limit: 1,
|
|
||||||
});
|
|
||||||
|
|
||||||
const run = response.data[0];
|
|
||||||
if (!run) {
|
|
||||||
await prisma.conversation.update({
|
|
||||||
where: {
|
|
||||||
id: conversationId,
|
|
||||||
},
|
|
||||||
data: {
|
|
||||||
status: "failed",
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
return await runs.cancel(run.id);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getConversationContext(
|
|
||||||
conversationHistoryId: string,
|
|
||||||
): Promise<{
|
|
||||||
previousHistory: ConversationHistory[];
|
|
||||||
}> {
|
|
||||||
const conversationHistory = await prisma.conversationHistory.findUnique({
|
|
||||||
where: { id: conversationHistoryId },
|
|
||||||
include: { conversation: true },
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!conversationHistory) {
|
|
||||||
return {
|
|
||||||
previousHistory: [],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get previous conversation history message and response
|
|
||||||
let previousHistory: ConversationHistory[] = [];
|
|
||||||
|
|
||||||
if (conversationHistory.conversationId) {
|
|
||||||
previousHistory = await prisma.conversationHistory.findMany({
|
|
||||||
where: {
|
|
||||||
conversationId: conversationHistory.conversationId,
|
|
||||||
id: {
|
|
||||||
not: conversationHistoryId,
|
|
||||||
},
|
|
||||||
deleted: null,
|
|
||||||
},
|
|
||||||
orderBy: {
|
|
||||||
createdAt: "asc",
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
previousHistory,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export const getConversationAndHistory = async (
|
export const getConversationAndHistory = async (
|
||||||
conversationId: string,
|
conversationId: string,
|
||||||
userId: string,
|
userId: string,
|
||||||
@@ -275,6 +114,7 @@ export const getConversationAndHistory = async (
   const conversation = await prisma.conversation.findFirst({
     where: {
       id: conversationId,
+      userId,
     },
     include: {
       ConversationHistory: true,
@@ -284,13 +124,32 @@ export const getConversationAndHistory = async (
   return conversation;
 };

+export const createConversationHistory = async (
+  userMessage: string,
+  conversationId: string,
+  userType: UserTypeEnum,
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  thoughts?: Record<string, any>,
+) => {
+  return await prisma.conversationHistory.create({
+    data: {
+      conversationId,
+      message: userMessage,
+      thoughts,
+      userType,
+    },
+  });
+};
+
 export const GetConversationsListSchema = z.object({
   page: z.string().optional().default("1"),
   limit: z.string().optional().default("20"),
   search: z.string().optional(),
 });

-export type GetConversationsListDto = z.infer<typeof GetConversationsListSchema>;
+export type GetConversationsListDto = z.infer<
+  typeof GetConversationsListSchema
+>;

 export async function getConversationsList(
   workspaceId: string,
apps/webapp/app/services/graphModels/compactedSession.ts (new file)
@@ -0,0 +1,319 @@
|
import { runQuery } from "~/lib/neo4j.server";
|
||||||
|
|
||||||
|
export interface SessionEpisodeData {
|
||||||
|
uuid: string;
|
||||||
|
content: string;
|
||||||
|
originalContent: string;
|
||||||
|
source: string;
|
||||||
|
createdAt: Date;
|
||||||
|
validAt: Date;
|
||||||
|
metadata: any;
|
||||||
|
sessionId: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CompactedSessionNode {
|
||||||
|
uuid: string;
|
||||||
|
sessionId: string;
|
||||||
|
summary: string;
|
||||||
|
summaryEmbedding: number[];
|
||||||
|
episodeCount: number;
|
||||||
|
startTime: Date;
|
||||||
|
endTime: Date;
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt?: Date;
|
||||||
|
confidence: number;
|
||||||
|
userId: string;
|
||||||
|
source: string;
|
||||||
|
compressionRatio: number;
|
||||||
|
metadata: Record<string, any>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Save or update a compacted session
|
||||||
|
*/
|
||||||
|
export async function saveCompactedSession(
|
||||||
|
compact: CompactedSessionNode
|
||||||
|
): Promise<string> {
|
||||||
|
const query = `
|
||||||
|
MERGE (cs:CompactedSession {uuid: $uuid})
|
||||||
|
ON CREATE SET
|
||||||
|
cs.sessionId = $sessionId,
|
||||||
|
cs.summary = $summary,
|
||||||
|
cs.summaryEmbedding = $summaryEmbedding,
|
||||||
|
cs.episodeCount = $episodeCount,
|
||||||
|
cs.startTime = $startTime,
|
||||||
|
cs.endTime = $endTime,
|
||||||
|
cs.createdAt = $createdAt,
|
||||||
|
cs.confidence = $confidence,
|
||||||
|
cs.userId = $userId,
|
||||||
|
cs.source = $source,
|
||||||
|
cs.compressionRatio = $compressionRatio,
|
||||||
|
cs.metadata = $metadata
|
||||||
|
ON MATCH SET
|
||||||
|
cs.summary = $summary,
|
||||||
|
cs.summaryEmbedding = $summaryEmbedding,
|
||||||
|
cs.episodeCount = $episodeCount,
|
||||||
|
cs.endTime = $endTime,
|
||||||
|
cs.updatedAt = $updatedAt,
|
||||||
|
cs.confidence = $confidence,
|
||||||
|
cs.compressionRatio = $compressionRatio,
|
||||||
|
cs.metadata = $metadata
|
||||||
|
RETURN cs.uuid as uuid
|
||||||
|
`;
|
||||||
|
|
||||||
|
const params = {
|
||||||
|
uuid: compact.uuid,
|
||||||
|
sessionId: compact.sessionId,
|
||||||
|
summary: compact.summary,
|
||||||
|
summaryEmbedding: compact.summaryEmbedding,
|
||||||
|
episodeCount: compact.episodeCount,
|
||||||
|
startTime: compact.startTime.toISOString(),
|
||||||
|
endTime: compact.endTime.toISOString(),
|
||||||
|
createdAt: compact.createdAt.toISOString(),
|
||||||
|
updatedAt: compact.updatedAt?.toISOString() || null,
|
||||||
|
confidence: compact.confidence,
|
||||||
|
userId: compact.userId,
|
||||||
|
source: compact.source,
|
||||||
|
compressionRatio: compact.compressionRatio,
|
||||||
|
metadata: JSON.stringify(compact.metadata || {}),
|
||||||
|
};
|
||||||
|
|
||||||
|
const result = await runQuery(query, params);
|
||||||
|
return result[0].get("uuid");
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a compacted session by UUID
|
||||||
|
*/
|
||||||
|
export async function getCompactedSession(
|
||||||
|
uuid: string
|
||||||
|
): Promise<CompactedSessionNode | null> {
|
||||||
|
const query = `
|
||||||
|
MATCH (cs:CompactedSession {uuid: $uuid})
|
||||||
|
RETURN cs
|
||||||
|
`;
|
||||||
|
|
||||||
|
const result = await runQuery(query, { uuid });
|
||||||
|
if (result.length === 0) return null;
|
||||||
|
|
||||||
|
const compact = result[0].get("cs").properties;
|
||||||
|
return parseCompactedSessionNode(compact);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get compacted session by sessionId
|
||||||
|
*/
|
||||||
|
export async function getCompactedSessionBySessionId(
|
||||||
|
sessionId: string,
|
||||||
|
userId: string
|
||||||
|
): Promise<CompactedSessionNode | null> {
|
||||||
|
const query = `
|
||||||
|
MATCH (cs:CompactedSession {sessionId: $sessionId, userId: $userId})
|
||||||
|
RETURN cs
|
||||||
|
ORDER BY cs.endTime DESC
|
||||||
|
LIMIT 1
|
||||||
|
`;
|
||||||
|
|
||||||
|
const result = await runQuery(query, { sessionId, userId });
|
||||||
|
if (result.length === 0) return null;
|
||||||
|
|
||||||
|
const compact = result[0].get("cs").properties;
|
||||||
|
return parseCompactedSessionNode(compact);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all episodes linked to a compacted session
|
||||||
|
*/
|
||||||
|
export async function getCompactedSessionEpisodes(
|
||||||
|
compactUuid: string
|
||||||
|
): Promise<string[]> {
|
||||||
|
const query = `
|
||||||
|
MATCH (cs:CompactedSession {uuid: $compactUuid})-[:COMPACTS]->(e:Episode)
|
||||||
|
RETURN e.uuid as episodeUuid
|
||||||
|
ORDER BY e.createdAt ASC
|
||||||
|
`;
|
||||||
|
|
||||||
|
const result = await runQuery(query, { compactUuid });
|
||||||
|
return result.map((r) => r.get("episodeUuid"));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Link episodes to compacted session
|
||||||
|
*/
|
||||||
|
export async function linkEpisodesToCompact(
|
||||||
|
compactUuid: string,
|
||||||
|
episodeUuids: string[],
|
||||||
|
userId: string
|
||||||
|
): Promise<void> {
|
||||||
|
const query = `
|
||||||
|
MATCH (cs:CompactedSession {uuid: $compactUuid, userId: $userId})
|
||||||
|
UNWIND $episodeUuids as episodeUuid
|
||||||
|
MATCH (e:Episode {uuid: episodeUuid, userId: $userId})
|
||||||
|
MERGE (cs)-[:COMPACTS {createdAt: datetime()}]->(e)
|
||||||
|
MERGE (e)-[:COMPACTED_INTO {createdAt: datetime()}]->(cs)
|
||||||
|
`;
|
||||||
|
|
||||||
|
await runQuery(query, { compactUuid, episodeUuids, userId });
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Search compacted sessions by embedding similarity
|
||||||
|
*/
|
||||||
|
export async function searchCompactedSessionsByEmbedding(
|
||||||
|
embedding: number[],
|
||||||
|
userId: string,
|
||||||
|
limit: number = 10,
|
||||||
|
minScore: number = 0.7
|
||||||
|
): Promise<Array<{ compact: CompactedSessionNode; score: number }>> {
|
||||||
|
const query = `
|
||||||
|
MATCH (cs:CompactedSession {userId: $userId})
|
||||||
|
WHERE cs.summaryEmbedding IS NOT NULL
|
||||||
|
WITH cs,
|
||||||
|
gds.similarity.cosine(cs.summaryEmbedding, $embedding) AS score
|
    WHERE score >= $minScore
    RETURN cs, score
    ORDER BY score DESC
    LIMIT $limit
  `;

  const result = await runQuery(query, {
    embedding,
    userId,
    limit,
    minScore,
  });

  return result.map((r) => ({
    compact: parseCompactedSessionNode(r.get("cs").properties),
    score: r.get("score"),
  }));
}

/**
 * Get compacted sessions for a user
 */
export async function getUserCompactedSessions(
  userId: string,
  limit: number = 50
): Promise<CompactedSessionNode[]> {
  const query = `
    MATCH (cs:CompactedSession {userId: $userId})
    RETURN cs
    ORDER BY cs.endTime DESC
    LIMIT $limit
  `;

  const result = await runQuery(query, { userId, limit });
  return result.map((r) => parseCompactedSessionNode(r.get("cs").properties));
}

/**
 * Delete a compacted session
 */
export async function deleteCompactedSession(uuid: string): Promise<void> {
  const query = `
    MATCH (cs:CompactedSession {uuid: $uuid})
    DETACH DELETE cs
  `;

  await runQuery(query, { uuid });
}

/**
 * Get compaction statistics for a user
 */
export async function getCompactionStats(userId: string): Promise<{
  totalCompacts: number;
  totalEpisodes: number;
  averageCompressionRatio: number;
  mostRecentCompaction: Date | null;
}> {
  const query = `
    MATCH (cs:CompactedSession {userId: $userId})
    RETURN
      count(cs) as totalCompacts,
      sum(cs.episodeCount) as totalEpisodes,
      avg(cs.compressionRatio) as avgCompressionRatio,
      max(cs.endTime) as mostRecent
  `;

  const result = await runQuery(query, { userId });
  if (result.length === 0) {
    return {
      totalCompacts: 0,
      totalEpisodes: 0,
      averageCompressionRatio: 0,
      mostRecentCompaction: null,
    };
  }

  const stats = result[0];
  return {
    totalCompacts: stats.get("totalCompacts")?.toNumber() || 0,
    totalEpisodes: stats.get("totalEpisodes")?.toNumber() || 0,
    averageCompressionRatio: stats.get("avgCompressionRatio") || 0,
    mostRecentCompaction: stats.get("mostRecent")
      ? new Date(stats.get("mostRecent"))
      : null,
  };
}

/**
 * Get all episodes for a session
 */
export async function getSessionEpisodes(
  sessionId: string,
  userId: string,
  afterTime?: Date
): Promise<SessionEpisodeData[]> {
  const query = `
    MATCH (e:Episode {sessionId: $sessionId, userId: $userId})
    ${afterTime ? "WHERE e.createdAt > datetime($afterTime)" : ""}
    RETURN e
    ORDER BY e.createdAt ASC
  `;

  const result = await runQuery(query, {
    sessionId,
    userId,
    afterTime: afterTime?.toISOString(),
  });

  return result.map((r) => r.get("e").properties);
}

/**
 * Get episode count for a session
 */
export async function getSessionEpisodeCount(
  sessionId: string,
  userId: string,
  afterTime?: Date
): Promise<number> {
  const episodes = await getSessionEpisodes(sessionId, userId, afterTime);
  return episodes.length;
}

/**
 * Helper to parse raw compact node from Neo4j
 */
function parseCompactedSessionNode(raw: any): CompactedSessionNode {
  return {
    uuid: raw.uuid,
    sessionId: raw.sessionId,
    summary: raw.summary,
    summaryEmbedding: raw.summaryEmbedding || [],
    episodeCount: raw.episodeCount || 0,
    startTime: new Date(raw.startTime),
    endTime: new Date(raw.endTime),
    createdAt: new Date(raw.createdAt),
    updatedAt: raw.updatedAt ? new Date(raw.updatedAt) : undefined,
    confidence: raw.confidence || 0,
    userId: raw.userId,
    source: raw.source,
    compressionRatio: raw.compressionRatio || 1,
    metadata: typeof raw.metadata === "string"
      ? JSON.parse(raw.metadata)
      : raw.metadata || {},
  };
}
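For orientation, a minimal sketch of how these compacted-session helpers might be combined from a service layer. The module path, the search-helper name (searchCompactedSessions — only the tail of that function is visible above), and the report shape are assumptions for illustration, not taken from the repository.

// Hypothetical caller: summarize a user's compaction state and find related summaries.
import {
  getCompactionStats,
  getUserCompactedSessions,
  searchCompactedSessions, // assumed name of the embedding-search helper shown partially above
} from "./graphModels/compactedSession";
import { getEmbedding } from "~/lib/model.server";

export async function compactionReport(userId: string, queryText: string) {
  const stats = await getCompactionStats(userId);
  const recent = await getUserCompactedSessions(userId, 10);

  // Embed the query text and look for semantically similar session summaries.
  const embedding = await getEmbedding(queryText);
  const matches = await searchCompactedSessions(embedding, userId, 5, 0.75);

  return {
    totalCompacts: stats.totalCompacts,
    averageCompressionRatio: stats.averageCompressionRatio,
    recentSummaries: recent.map((c) => c.summary),
    related: matches.map((m) => ({ summary: m.compact.summary, score: m.score })),
  };
}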
@@ -45,6 +45,43 @@ export async function createSpace(
   };
 }
+
+/**
+ * Get all active spaces for a user
+ */
+export async function getAllSpacesForUser(
+  userId: string,
+): Promise<SpaceNode[]> {
+  const query = `
+    MATCH (s:Space {userId: $userId})
+    WHERE s.isActive = true
+
+    // Count episodes assigned to each space
+    OPTIONAL MATCH (s)-[:HAS_EPISODE]->(e:Episode {userId: $userId})
+
+    WITH s, count(e) as episodeCount
+    RETURN s, episodeCount
+    ORDER BY s.createdAt DESC
+  `;
+
+  const result = await runQuery(query, { userId });
+
+  return result.map((record) => {
+    const spaceData = record.get("s").properties;
+    const episodeCount = record.get("episodeCount") || 0;
+
+    return {
+      uuid: spaceData.uuid,
+      name: spaceData.name,
+      description: spaceData.description,
+      userId: spaceData.userId,
+      createdAt: new Date(spaceData.createdAt),
+      updatedAt: new Date(spaceData.updatedAt),
+      isActive: spaceData.isActive,
+      contextCount: Number(episodeCount),
+    };
+  });
+}
+
 /**
  * Get a specific space by ID
  */
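A short usage sketch for the new helper, for example from a route loader. The route, import path, and auth helper are assumptions for illustration only.

// Hypothetical Remix-style loader using the new helper; paths and requireUserId are illustrative.
import { json, type LoaderFunctionArgs } from "@remix-run/node";
import { getAllSpacesForUser } from "~/services/graphModels/space";
import { requireUserId } from "~/services/session.server";

export async function loader({ request }: LoaderFunctionArgs) {
  const userId = await requireUserId(request);

  // Returns only active spaces, each annotated with the number of episodes linked via HAS_EPISODE.
  const spaces = await getAllSpacesForUser(userId);

  return json({
    spaces: spaces.map((s) => ({
      id: s.uuid,
      name: s.name,
      episodes: s.contextCount,
    })),
  });
}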
apps/webapp/app/services/jobManager.server.ts (new file, 87 lines)
@@ -0,0 +1,87 @@
/**
 * Job Manager Service
 *
 * Unified interface for managing background jobs across both
 * Trigger.dev and BullMQ queue providers.
 */

import { env } from "~/env.server";

type QueueProvider = "trigger" | "bullmq";

interface JobInfo {
  id: string;
  isCompleted: boolean;
  status?: string;
}

/**
 * Find running jobs by tags/identifiers
 */
export async function findRunningJobs(params: {
  tags: string[];
  taskIdentifier?: string;
}): Promise<JobInfo[]> {
  const provider = env.QUEUE_PROVIDER as QueueProvider;

  if (provider === "trigger") {
    const { runs } = await import("@trigger.dev/sdk");
    const runningTasks = await runs.list({
      tag: params.tags,
      taskIdentifier: params.taskIdentifier,
    });

    return runningTasks.data.map((task) => ({
      id: task.id,
      isCompleted: task.isCompleted,
      status: task.status,
    }));
  } else {
    // BullMQ
    const { getJobsByTags } = await import("~/bullmq/utils/job-finder");
    const jobs = await getJobsByTags(params.tags, params.taskIdentifier);

    return jobs;
  }
}

/**
 * Cancel a running job
 */
export async function cancelJob(jobId: string): Promise<void> {
  const provider = env.QUEUE_PROVIDER as QueueProvider;

  if (provider === "trigger") {
    const { runs } = await import("@trigger.dev/sdk");
    await runs.cancel(jobId);
  } else {
    // BullMQ
    const { cancelJobById } = await import("~/bullmq/utils/job-finder");
    await cancelJobById(jobId);
  }
}

/**
 * Get job status
 */
export async function getJobStatus(jobId: string): Promise<JobInfo | null> {
  const provider = env.QUEUE_PROVIDER as QueueProvider;

  if (provider === "trigger") {
    const { runs } = await import("@trigger.dev/sdk");
    try {
      const run = await runs.retrieve(jobId);
      return {
        id: run.id,
        isCompleted: run.isCompleted,
        status: run.status,
      };
    } catch {
      return null;
    }
  } else {
    // BullMQ
    const { getJobById } = await import("~/bullmq/utils/job-finder");
    return await getJobById(jobId);
  }
}
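A sketch of how a caller might use this provider-agnostic layer, for example to avoid enqueueing a duplicate ingestion run for the same user. The tag scheme and task identifier are made up for illustration; only findRunningJobs, cancelJob, and getJobStatus come from the file above.

// Hypothetical caller that detects an equivalent running job and cancels extras,
// regardless of whether the backend is Trigger.dev or BullMQ.
import { findRunningJobs, cancelJob, getJobStatus } from "~/services/jobManager.server";

export async function ensureSingleIngestionRun(userId: string) {
  const running = await findRunningJobs({
    tags: [`user:${userId}`], // illustrative tag scheme
    taskIdentifier: "ingest-episode", // illustrative task identifier
  });

  const active = running.filter((job) => !job.isCompleted);
  if (active.length > 1) {
    // Keep the first run, cancel the rest.
    await Promise.all(active.slice(1).map((job) => cancelJob(job.id)));
  }

  return active[0] ? await getJobStatus(active[0].id) : null;
}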
@@ -10,13 +10,8 @@ import {
   type EpisodeType,
 } from "@core/types";
 import { logger } from "./logger.service";
-import { ClusteringService } from "./clustering.server";
 import crypto from "crypto";
-import {
-  dedupeNodes,
-  extractAttributes,
-  extractEntities,
-} from "./prompts/nodes";
+import { dedupeNodes, extractEntities } from "./prompts/nodes";
 import {
   extractStatements,
   extractStatementsOSS,
@@ -40,7 +35,11 @@ import {
   saveTriple,
   searchStatementsByEmbedding,
 } from "./graphModels/statement";
-import { getEmbedding, makeModelCall, isProprietaryModel } from "~/lib/model.server";
+import {
+  getEmbedding,
+  makeModelCall,
+  isProprietaryModel,
+} from "~/lib/model.server";
 import { runQuery } from "~/lib/neo4j.server";
 import { Apps, getNodeTypesString } from "~/utils/presets/nodes";
 import { normalizePrompt, normalizeDocumentPrompt } from "./prompts";
@@ -50,12 +49,6 @@ import { type PrismaClient } from "@prisma/client";
 const DEFAULT_EPISODE_WINDOW = 5;

 export class KnowledgeGraphService {
-  private clusteringService: ClusteringService;
-
-  constructor() {
-    this.clusteringService = new ClusteringService();
-  }
-
   async getEmbedding(text: string) {
     return getEmbedding(text);
   }
@@ -419,8 +412,8 @@ export class KnowledgeGraphService {
     logger.log(`Processing time: ${processingTimeMs} ms`);

     // Count only truly new statements (exclude duplicates)
-    const newStatementsCount = updatedTriples.filter(triple =>
-      triple.statement.createdAt >= episode.createdAt
+    const newStatementsCount = updatedTriples.filter(
+      (triple) => triple.statement.createdAt >= episode.createdAt,
     ).length;

     return {
@@ -442,7 +435,10 @@ export class KnowledgeGraphService {
   private async extractEntities(
     episode: EpisodicNode,
     previousEpisodes: EpisodicNode[],
-    tokenMetrics: { high: { input: number; output: number; total: number }; low: { input: number; output: number; total: number } },
+    tokenMetrics: {
+      high: { input: number; output: number; total: number };
+      low: { input: number; output: number; total: number };
+    },
   ): Promise<EntityNode[]> {
     // Use the prompt library to get the appropriate prompts
     const context = {
@@ -460,14 +456,20 @@ export class KnowledgeGraphService {
     let responseText = "";

     // Entity extraction requires HIGH complexity (creative reasoning, nuanced NER)
-    await makeModelCall(false, messages as CoreMessage[], (text, _model, usage) => {
-      responseText = text;
-      if (usage) {
-        tokenMetrics.high.input += usage.promptTokens;
-        tokenMetrics.high.output += usage.completionTokens;
-        tokenMetrics.high.total += usage.totalTokens;
-      }
-    }, undefined, 'high');
+    await makeModelCall(
+      false,
+      messages as CoreMessage[],
+      (text, _model, usage) => {
+        responseText = text;
+        if (usage) {
+          tokenMetrics.high.input += usage.promptTokens;
+          tokenMetrics.high.output += usage.completionTokens;
+          tokenMetrics.high.total += usage.totalTokens;
+        }
+      },
+      undefined,
+      "high",
+    );

     // Convert to EntityNode objects
     let entities: EntityNode[] = [];
@@ -478,19 +480,23 @@ export class KnowledgeGraphService {
       responseText = outputMatch[1].trim();
       const parsedResponse = JSON.parse(responseText || "[]");
       // Handle both old format {entities: [...]} and new format [...]
-      const extractedEntities = Array.isArray(parsedResponse) ? parsedResponse : (parsedResponse.entities || []);
+      const extractedEntities = Array.isArray(parsedResponse)
+        ? parsedResponse
+        : parsedResponse.entities || [];

       // Batch generate embeddings for entity names
-      const entityNames = Array.isArray(extractedEntities[0]) ? extractedEntities : extractedEntities.map((entity: any) => entity.name || entity);
+      const entityNames = Array.isArray(extractedEntities[0])
+        ? extractedEntities
+        : extractedEntities.map((entity: any) => entity.name || entity);
       const nameEmbeddings = await Promise.all(
         entityNames.map((name: string) => this.getEmbedding(name)),
       );

       entities = extractedEntities.map((entity: any, index: number) => ({
         uuid: crypto.randomUUID(),
-        name: typeof entity === 'string' ? entity : entity.name,
+        name: typeof entity === "string" ? entity : entity.name,
         type: undefined, // Type will be inferred from statements
-        attributes: typeof entity === 'string' ? {} : (entity.attributes || {}),
+        attributes: typeof entity === "string" ? {} : entity.attributes || {},
         nameEmbedding: nameEmbeddings[index],
         typeEmbedding: undefined, // No type embedding needed
         createdAt: new Date(),
@@ -512,7 +518,10 @@ export class KnowledgeGraphService {
       expanded: EntityNode[];
     },
     previousEpisodes: EpisodicNode[],
-    tokenMetrics: { high: { input: number; output: number; total: number }; low: { input: number; output: number; total: number } },
+    tokenMetrics: {
+      high: { input: number; output: number; total: number };
+      low: { input: number; output: number; total: number };
+    },
   ): Promise<Triple[]> {
     // Use the prompt library to get the appropriate prompts
     const context = {
@@ -534,22 +543,28 @@ export class KnowledgeGraphService {
       referenceTime: episode.validAt.toISOString(),
     };

-    console.log("proprietary model", isProprietaryModel(undefined, 'high'));
+    console.log("proprietary model", isProprietaryModel(undefined, "high"));
     // Statement extraction requires HIGH complexity (causal reasoning, emotional context)
     // Choose between proprietary and OSS prompts based on model type
-    const messages = isProprietaryModel(undefined, 'high')
+    const messages = isProprietaryModel(undefined, "high")
       ? extractStatements(context)
       : extractStatementsOSS(context);

     let responseText = "";
-    await makeModelCall(false, messages as CoreMessage[], (text, _model, usage) => {
-      responseText = text;
-      if (usage) {
-        tokenMetrics.high.input += usage.promptTokens;
-        tokenMetrics.high.output += usage.completionTokens;
-        tokenMetrics.high.total += usage.totalTokens;
-      }
-    }, undefined, 'high');
+    await makeModelCall(
+      false,
+      messages as CoreMessage[],
+      (text, _model, usage) => {
+        responseText = text;
+        if (usage) {
+          tokenMetrics.high.input += usage.promptTokens as number;
+          tokenMetrics.high.output += usage.completionTokens as number;
+          tokenMetrics.high.total += usage.totalTokens as number;
+        }
+      },
+      undefined,
+      "high",
+    );

     const outputMatch = responseText.match(/<output>([\s\S]*?)<\/output>/);
     if (outputMatch && outputMatch[1]) {
@@ -561,9 +576,11 @@ export class KnowledgeGraphService {
       // Parse the statements from the LLM response
       const parsedResponse = JSON.parse(responseText || "[]");
       // Handle both old format {"edges": [...]} and new format [...]
-      const extractedTriples: ExtractedTripleData[] = Array.isArray(parsedResponse)
+      const extractedTriples: ExtractedTripleData[] = Array.isArray(
+        parsedResponse,
+      )
         ? parsedResponse
-        : (parsedResponse.edges || []);
+        : parsedResponse.edges || [];

       console.log(`extracted triples length: ${extractedTriples.length}`);

@@ -683,7 +700,10 @@ export class KnowledgeGraphService {
     triples: Triple[],
     episode: EpisodicNode,
     previousEpisodes: EpisodicNode[],
-    tokenMetrics: { high: { input: number; output: number; total: number }; low: { input: number; output: number; total: number } },
+    tokenMetrics: {
+      high: { input: number; output: number; total: number };
+      low: { input: number; output: number; total: number };
+    },
   ): Promise<Triple[]> {
     // Step 1: Extract unique entities from triples
     const uniqueEntitiesMap = new Map<string, EntityNode>();
@@ -810,14 +830,20 @@ export class KnowledgeGraphService {
     let responseText = "";

     // Entity deduplication is LOW complexity (pattern matching, similarity comparison)
-    await makeModelCall(false, messages as CoreMessage[], (text, _model, usage) => {
-      responseText = text;
-      if (usage) {
-        tokenMetrics.low.input += usage.promptTokens;
-        tokenMetrics.low.output += usage.completionTokens;
-        tokenMetrics.low.total += usage.totalTokens;
-      }
-    }, undefined, 'low');
+    await makeModelCall(
+      false,
+      messages as CoreMessage[],
+      (text, _model, usage) => {
+        responseText = text;
+        if (usage) {
+          tokenMetrics.low.input += usage.promptTokens;
+          tokenMetrics.low.output += usage.completionTokens;
+          tokenMetrics.low.total += usage.totalTokens;
+        }
+      },
+      undefined,
+      "low",
+    );

     // Step 5: Process LLM response
     const outputMatch = responseText.match(/<output>([\s\S]*?)<\/output>/);
@@ -898,7 +924,10 @@ export class KnowledgeGraphService {
     triples: Triple[],
     episode: EpisodicNode,
     previousEpisodes: EpisodicNode[],
-    tokenMetrics: { high: { input: number; output: number; total: number }; low: { input: number; output: number; total: number } },
+    tokenMetrics: {
+      high: { input: number; output: number; total: number };
+      low: { input: number; output: number; total: number };
+    },
   ): Promise<{
     resolvedStatements: Triple[];
     invalidatedStatements: string[];
@@ -911,7 +940,10 @@ export class KnowledgeGraphService {
     }

     // Step 1: Collect all potential matches for all triples at once
-    const allPotentialMatches: Map<string, Omit<StatementNode, "factEmbedding">[]> = new Map();
+    const allPotentialMatches: Map<
+      string,
+      Omit<StatementNode, "factEmbedding">[]
+    > = new Map();
     const allExistingTripleData: Map<string, Triple> = new Map();

     // For preparing the LLM context
@@ -971,7 +1003,8 @@ export class KnowledgeGraphService {
     }

     // Phase 3: Check related memories for contradictory statements
-    const previousEpisodesStatements: Omit<StatementNode, "factEmbedding">[] = [];
+    const previousEpisodesStatements: Omit<StatementNode, "factEmbedding">[] =
+      [];

     await Promise.all(
       previousEpisodes.map(async (episode) => {
@@ -1052,14 +1085,20 @@ export class KnowledgeGraphService {
     let responseText = "";

     // Statement resolution is LOW complexity (rule-based duplicate/contradiction detection)
-    await makeModelCall(false, messages, (text, _model, usage) => {
-      responseText = text;
-      if (usage) {
-        tokenMetrics.low.input += usage.promptTokens;
-        tokenMetrics.low.output += usage.completionTokens;
-        tokenMetrics.low.total += usage.totalTokens;
-      }
-    }, undefined, 'low');
+    await makeModelCall(
+      false,
+      messages,
+      (text, _model, usage) => {
+        responseText = text;
+        if (usage) {
+          tokenMetrics.low.input += usage.promptTokens;
+          tokenMetrics.low.output += usage.completionTokens;
+          tokenMetrics.low.total += usage.totalTokens;
+        }
+      },
+      undefined,
+      "low",
+    );

     try {
       // Extract the JSON response from the output tags
@@ -1134,90 +1173,6 @@ export class KnowledgeGraphService {
     return { resolvedStatements, invalidatedStatements };
   }

-  /**
-   * Add attributes to entity nodes based on the resolved statements
-   */
-  private async addAttributesToEntities(
-    triples: Triple[],
-    episode: EpisodicNode,
-    tokenMetrics: { high: { input: number; output: number; total: number }; low: { input: number; output: number; total: number } },
-  ): Promise<Triple[]> {
-    // Collect all unique entities from the triples
-    const entityMap = new Map<string, EntityNode>();
-
-    // Add all subjects, predicates, and objects to the map
-    triples.forEach((triple) => {
-      if (triple.subject) {
-        entityMap.set(triple.subject.uuid, triple.subject);
-      }
-      if (triple.predicate) {
-        entityMap.set(triple.predicate.uuid, triple.predicate);
-      }
-      if (triple.object) {
-        entityMap.set(triple.object.uuid, triple.object);
-      }
-    });
-
-    // Convert the map to an array of entities
-    const entities = Array.from(entityMap.values());
-
-    if (entities.length === 0) {
-      return triples; // No entities to process
-    }
-
-    // Prepare simplified context for the LLM
-    const context = {
-      episodeContent: episode.content,
-      entities: entities.map((entity) => ({
-        uuid: entity.uuid,
-        name: entity.name,
-        currentAttributes: entity.attributes || {},
-      })),
-    };
-
-    // Create a prompt for the LLM to extract attributes
-    const messages = extractAttributes(context);
-
-    let responseText = "";
-
-    // Attribute extraction is LOW complexity (simple key-value extraction)
-    await makeModelCall(false, messages as CoreMessage[], (text, _model, usage) => {
-      responseText = text;
-      if (usage) {
-        tokenMetrics.low.input += usage.promptTokens;
-        tokenMetrics.low.output += usage.completionTokens;
-        tokenMetrics.low.total += usage.totalTokens;
-      }
-    }, undefined, 'low');
-
-    try {
-      const outputMatch = responseText.match(/<output>([\s\S]*?)<\/output>/);
-      if (outputMatch && outputMatch[1]) {
-        responseText = outputMatch[1].trim();
-      }
-      // Parse the LLM response
-      const responseData = JSON.parse(responseText);
-      const updatedEntities = responseData.entities || [];
-
-      // Update entity attributes and save them
-      for (const updatedEntity of updatedEntities) {
-        const entity = entityMap.get(updatedEntity.uuid);
-        if (entity) {
-          // Merge the existing attributes with the new ones
-          entity.attributes = {
-            ...updatedEntity.attributes,
-          };
-        }
-      }
-
-      logger.info(`Updated attributes for ${updatedEntities.length} entities`);
-    } catch (error) {
-      logger.error("Error processing entity attributes", { error });
-    }
-
-    return triples;
-  }
-
   /**
    * Normalize an episode by extracting entities and creating nodes and statements
    */
@@ -1226,7 +1181,10 @@ export class KnowledgeGraphService {
     source: string,
     userId: string,
     prisma: PrismaClient,
-    tokenMetrics: { high: { input: number; output: number; total: number }; low: { input: number; output: number; total: number } },
+    tokenMetrics: {
+      high: { input: number; output: number; total: number };
+      low: { input: number; output: number; total: number };
+    },
     episodeTimestamp?: Date,
     sessionContext?: string,
     contentType?: EpisodeType,
@@ -1263,14 +1221,20 @@ export class KnowledgeGraphService {
       : normalizePrompt(context);
     // Normalization is LOW complexity (text cleaning and standardization)
     let responseText = "";
-    await makeModelCall(false, messages, (text, _model, usage) => {
-      responseText = text;
-      if (usage) {
-        tokenMetrics.low.input += usage.promptTokens;
-        tokenMetrics.low.output += usage.completionTokens;
-        tokenMetrics.low.total += usage.totalTokens;
-      }
-    }, undefined, 'high');
+    await makeModelCall(
+      false,
+      messages,
+      (text, _model, usage) => {
+        responseText = text;
+        if (usage) {
+          tokenMetrics.low.input += usage.promptTokens;
+          tokenMetrics.low.output += usage.completionTokens;
+          tokenMetrics.low.total += usage.totalTokens;
+        }
+      },
+      undefined,
+      "high",
+    );
     let normalizedEpisodeBody = "";
     const outputMatch = responseText.match(/<output>([\s\S]*?)<\/output>/);
     if (outputMatch && outputMatch[1]) {
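Across these hunks a single tokenMetrics accumulator is threaded through every LLM call, with separate high and low buckets matching the complexity tier passed to makeModelCall. A minimal sketch of how a caller might initialize and report such an accumulator; the helper names here are assumptions for illustration, not part of the diff.

// Hypothetical helpers around the tokenMetrics shape used above.
type Bucket = { input: number; output: number; total: number };
type TokenMetrics = { high: Bucket; low: Bucket };

export function emptyTokenMetrics(): TokenMetrics {
  return {
    high: { input: 0, output: 0, total: 0 },
    low: { input: 0, output: 0, total: 0 },
  };
}

// Example: summarize the high/low split after a pipeline run.
export function reportTokenUsage(metrics: TokenMetrics): string {
  const total = metrics.high.total + metrics.low.total;
  return (
    `tokens total=${total} ` +
    `high=${metrics.high.input}/${metrics.high.output} ` +
    `low=${metrics.low.input}/${metrics.low.output}`
  );
}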
@@ -58,6 +58,7 @@ async function createMcpServer(
     // Handle memory tools and integration meta-tools
     if (
       name.startsWith("memory_") ||
+      name === "get_session_id" ||
       name === "get_integrations" ||
       name === "get_integration_actions" ||
       name === "execute_integration_action"
@@ -266,7 +267,7 @@ export const handleSessionRequest = async (

     await transport.handleRequest(req, res);
   } else {
-    res.status(400).send("Invalid or missing session ID");
+    res.status(401).send("Invalid or missing session ID");
     return;
   }
 } else {
@@ -320,6 +320,14 @@ export async function getOrCreatePersonalAccessToken({
   };
 }

+export async function deletePersonalAccessToken(tokenId: string) {
+  return await prisma.personalAccessToken.delete({
+    where: {
+      id: tokenId,
+    },
+  });
+}
+
 /** Created a new PersonalAccessToken, and return the token. We only ever return the unencrypted token once. */
 export async function createPersonalAccessToken({
   name,
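A short sketch of how the new deletePersonalAccessToken helper might be wired into a token-revocation action. The route shape, import paths, and auth helper are assumptions for illustration; only the helper itself comes from the diff above, and a real action would also verify that the token belongs to the requesting user.

// Hypothetical Remix-style action revoking a token; paths and requireUserId are illustrative.
import { json, type ActionFunctionArgs } from "@remix-run/node";
import { deletePersonalAccessToken } from "~/services/personalAccessToken.server";
import { requireUserId } from "~/services/session.server";

export async function action({ request }: ActionFunctionArgs) {
  await requireUserId(request); // only authenticated users may revoke tokens
  const form = await request.formData();
  const tokenId = String(form.get("tokenId"));

  await deletePersonalAccessToken(tokenId);
  return json({ ok: true });
}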
|||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user