mirror of
https://github.com/eliasstepanik/core.git
synced 2026-01-12 05:28:29 +00:00
Compare commits
68 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f038ad5c61 | ||
|
|
4f27d2128b | ||
|
|
c869096be8 | ||
|
|
c5407be54d | ||
|
|
6c37b41ca4 | ||
|
|
023a220d3e | ||
|
|
b9c4fc13c2 | ||
|
|
0ad2bba2ad | ||
|
|
faad985e48 | ||
|
|
8de059bb2e | ||
|
|
76228d6aac | ||
|
|
6ac74a3f0b | ||
|
|
b255bbe7e6 | ||
|
|
da3d06782e | ||
|
|
a727671a30 | ||
|
|
e7ed6eb288 | ||
|
|
5b31c8ed62 | ||
|
|
f39c7cc6d0 | ||
|
|
b78713df41 | ||
|
|
6f1037e8e1 | ||
|
|
af56d7016e | ||
|
|
3a10ee53e8 | ||
|
|
ef1c8eac52 | ||
|
|
33bec831c6 | ||
|
|
8a6b06383e | ||
|
|
60dd4bfa6f | ||
|
|
00f983079f | ||
|
|
170eed76fb | ||
|
|
1db2628af4 | ||
|
|
95636f96a8 | ||
|
|
bcae1bd4a1 | ||
|
|
e372a38572 | ||
|
|
b0e141c2a2 | ||
|
|
d0126797de | ||
|
|
6732ff71c5 | ||
|
|
7523c99660 | ||
|
|
e7b43602c5 | ||
|
|
c8252a1c89 | ||
|
|
0616c1debd | ||
|
|
ddb7604fb2 | ||
|
|
3bdf051b32 | ||
|
|
a14b83d66d | ||
|
|
2281dab166 | ||
|
|
ecba7f5aa0 | ||
|
|
bcc0560cf0 | ||
|
|
27f8740691 | ||
|
|
3d1b93d97d | ||
|
|
665f98d7bf | ||
|
|
159e003d2e | ||
|
|
7c737cf51f | ||
|
|
f0debd5678 | ||
|
|
27762262d2 | ||
|
|
489fb5934a | ||
|
|
92ca34a02f | ||
|
|
46407b0fac | ||
|
|
5347c7a700 | ||
|
|
dc9b149445 | ||
|
|
f539ad1ecd | ||
|
|
7903dd08c3 | ||
|
|
1509e8d502 | ||
|
|
62fdf6181a | ||
|
|
812d7dea51 | ||
|
|
59620151f2 | ||
|
|
5150fab210 | ||
|
|
a0b3128329 | ||
|
|
a4b6a4f984 | ||
|
|
840ca64174 | ||
|
|
43c3482351 |
16
.env.example
16
.env.example
@ -1,4 +1,4 @@
|
|||||||
VERSION=0.1.22
|
VERSION=0.1.27
|
||||||
|
|
||||||
# Nest run in docker, change host to database container name
|
# Nest run in docker, change host to database container name
|
||||||
DB_HOST=localhost
|
DB_HOST=localhost
|
||||||
@ -41,17 +41,17 @@ NEO4J_USERNAME=neo4j
|
|||||||
NEO4J_PASSWORD=27192e6432564f4788d55c15131bd5ac
|
NEO4J_PASSWORD=27192e6432564f4788d55c15131bd5ac
|
||||||
OPENAI_API_KEY=
|
OPENAI_API_KEY=
|
||||||
|
|
||||||
|
|
||||||
MAGIC_LINK_SECRET=27192e6432564f4788d55c15131bd5ac
|
MAGIC_LINK_SECRET=27192e6432564f4788d55c15131bd5ac
|
||||||
|
|
||||||
|
|
||||||
NEO4J_AUTH=neo4j/27192e6432564f4788d55c15131bd5ac
|
NEO4J_AUTH=neo4j/27192e6432564f4788d55c15131bd5ac
|
||||||
OLLAMA_URL=http://ollama:11434
|
OLLAMA_URL=http://ollama:11434
|
||||||
|
|
||||||
EMBEDDING_MODEL=text-embedding-3-small
|
EMBEDDING_MODEL=text-embedding-3-small
|
||||||
MODEL=gpt-4.1-2025-04-14
|
MODEL=gpt-4.1-2025-04-14
|
||||||
|
|
||||||
## Trigger ##
|
## AWS Bedrock ##
|
||||||
TRIGGER_PROJECT_ID=
|
AWS_ACCESS_KEY_ID=
|
||||||
TRIGGER_SECRET_KEY=
|
AWS_SECRET_ACCESS_KEY=
|
||||||
TRIGGER_API_URL=http://host.docker.internal:8030
|
AWS_REGION=us-east-1
|
||||||
|
|
||||||
|
QUEUE_PROVIDER=bullmq
|
||||||
|
|
||||||
|
|||||||
26
.github/workflows/build-docker-image.yml
vendored
26
.github/workflows/build-docker-image.yml
vendored
@ -7,32 +7,6 @@ on:
|
|||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build-init:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
ref: main
|
|
||||||
|
|
||||||
- name: Set up QEMU
|
|
||||||
uses: docker/setup-qemu-action@v1
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v1
|
|
||||||
|
|
||||||
- name: Login to Docker Registry
|
|
||||||
run: echo "${{ secrets.DOCKER_PASSWORD }}" | docker login -u "${{ secrets.DOCKER_USERNAME }}" --password-stdin
|
|
||||||
|
|
||||||
- name: Build and Push Frontend Docker Image
|
|
||||||
uses: docker/build-push-action@v2
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
file: ./apps/init/Dockerfile
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
push: true
|
|
||||||
tags: redplanethq/init:${{ github.ref_name }}
|
|
||||||
|
|
||||||
build-webapp:
|
build-webapp:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
|||||||
17
.gitignore
vendored
17
.gitignore
vendored
@ -46,13 +46,14 @@ registry/
|
|||||||
|
|
||||||
.cursor
|
.cursor
|
||||||
CLAUDE.md
|
CLAUDE.md
|
||||||
|
AGENTS.md
|
||||||
|
|
||||||
.claude
|
.claude
|
||||||
.clinerules/byterover-rules.md
|
.clinerules
|
||||||
.kilocode/rules/byterover-rules.md
|
.kilocode
|
||||||
.roo/rules/byterover-rules.md
|
.roo
|
||||||
.windsurf/rules/byterover-rules.md
|
.windsurf
|
||||||
.cursor/rules/byterover-rules.mdc
|
.cursor
|
||||||
.kiro/steering/byterover-rules.md
|
.kiro
|
||||||
.qoder/rules/byterover-rules.md
|
.qoder
|
||||||
.augment/rules/byterover-rules.md
|
.augment
|
||||||
7
.vscode/settings.json
vendored
7
.vscode/settings.json
vendored
@ -1,7 +0,0 @@
|
|||||||
{
|
|
||||||
"eslint.workingDirectories": [
|
|
||||||
{
|
|
||||||
"mode": "auto"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
2
LICENSE
2
LICENSE
@ -1,4 +1,4 @@
|
|||||||
Sol License
|
Core License
|
||||||
|
|
||||||
GNU AFFERO GENERAL PUBLIC LICENSE
|
GNU AFFERO GENERAL PUBLIC LICENSE
|
||||||
Version 3, 19 November 2007
|
Version 3, 19 November 2007
|
||||||
|
|||||||
106
README.md
106
README.md
@ -33,7 +33,7 @@
|
|||||||
<img src="https://github.com/user-attachments/assets/89066cdd-204b-46c2-8ad4-4935f5ca9edd" width="200px" alt="CORE logo" />
|
<img src="https://github.com/user-attachments/assets/89066cdd-204b-46c2-8ad4-4935f5ca9edd" width="200px" alt="CORE logo" />
|
||||||
</a>
|
</a>
|
||||||
|
|
||||||
### CORE: Unified Memory Layer for Claude, Cursor, ChatGPT & All AI Tools
|
### CORE: Your Personal Memory Layer for AI Apps
|
||||||
|
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<a href="https://deepwiki.com/RedPlanetHQ/core">
|
<a href="https://deepwiki.com/RedPlanetHQ/core">
|
||||||
@ -41,21 +41,21 @@
|
|||||||
</a>
|
</a>
|
||||||
</p>
|
</p>
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<a href="https://docs.heysol.ai/core/overview"><b>Documentation</b></a> •
|
<a href="https://docs.heysol.ai/introduction"><b>Documentation</b></a> •
|
||||||
<a href="https://discord.gg/YGUZcvDjUa"><b>Discord</b></a>
|
<a href="https://discord.gg/YGUZcvDjUa"><b>Discord</b></a>
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
## 🔥 Research Highlights
|
## 🔥 Research Highlights
|
||||||
|
|
||||||
CORE memory achieves **88.24%** average accuracy in Locomo dataset across all reasoning tasks, significantly outperforming other memory providers. Check out this [blog](https://blog.heysol.ai/we-built-memory-for-individuals-and-achieved-sota-on-locomo-benchmark/) for more info.
|
CORE memory achieves **88.24%** average accuracy in Locomo dataset across all reasoning tasks, significantly outperforming other memory providers. Check out this [blog](https://blog.heysol.ai/core-build-memory-knowledge-graph-for-individuals-and-achieved-sota-on-locomo-benchmark/) for more info.
|
||||||
|
|
||||||
<img width="6048" height="3428" alt="benchmark" src="https://github.com/user-attachments/assets/2e5fdac5-02ed-4d00-9312-c21d09974e1f" />
|
<img width="6048" height="3428" alt="benchmark" src="https://github.com/user-attachments/assets/2e5fdac5-02ed-4d00-9312-c21d09974e1f" />
|
||||||
(1) Single-hop questions require answers based on a single session; (2) Multi-hop questions require synthesizing information from multiple different sessions; (3) Open-domain knowledge questions can be answered by integrating a speaker’s provided information with external knowledge such as commonsense or world facts; (4) Temporal reasoning questions can be answered through temporal reasoning and capturing time-related data cues within the conversation;
|
(1) Single-hop questions require answers based on a single session; (2) Multi-hop questions require synthesizing information from multiple different sessions; (3) Open-domain knowledge questions can be answered by integrating a speaker’s provided information with external knowledge such as commonsense or world facts; (4) Temporal reasoning questions can be answered through temporal reasoning and capturing time-related data cues within the conversation;
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
**Problem**
|
**Problem**
|
||||||
|
|
||||||
Developers waste time re-explaining context to AI tools. Hit token limits in Claude? Start fresh and lose everything. Switch from ChatGPT/Claude to Cursor? Explain your context again. Your conversations, decisions, and insights vanish between sessions. With every new AI tool, the cost of context switching grows.
|
Developers waste time re-explaining context to AI tools. Hit token limits in Claude? Start fresh and lose everything. Switch from ChatGPT/Claude to Cursor? Explain your context again. Your conversations, decisions, and insights vanish between sessions. With every new AI tool, the cost of context switching grows.
|
||||||
|
|
||||||
@ -63,43 +63,83 @@ Developers waste time re-explaining context to AI tools. Hit token limits in Cla
|
|||||||
|
|
||||||
CORE is an open-source unified, persistent memory layer for all your AI tools. Your context follows you from Cursor to Claude to ChatGPT to Claude Code. One knowledge graph remembers who said what, when, and why. Connect once, remember everywhere. Stop managing context and start building.
|
CORE is an open-source unified, persistent memory layer for all your AI tools. Your context follows you from Cursor to Claude to ChatGPT to Claude Code. One knowledge graph remembers who said what, when, and why. Connect once, remember everywhere. Stop managing context and start building.
|
||||||
|
|
||||||
## 🚀 Get Started
|
## 🚀 CORE Self-Hosting
|
||||||
|
|
||||||
|
Want to run CORE on your own infrastructure? Self-hosting gives you complete control over your data and deployment.
|
||||||
|
|
||||||
|
**Quick Deploy Options:**
|
||||||
|
|
||||||
|
[](https://railway.com/deploy/core?referralCode=LHvbIb&utm_medium=integration&utm_source=template&utm_campaign=generic)
|
||||||
|
|
||||||
|
**Prerequisites**:
|
||||||
|
|
||||||
|
- Docker (20.10.0+) and Docker Compose (2.20.0+) installed
|
||||||
|
- OpenAI API key
|
||||||
|
|
||||||
|
> **Note on Open-Source Models:** We tested OSS options like Ollama and GPT models, but their fact extraction and graph quality fell short. We're actively looking for options.
|
||||||
|
|
||||||
|
### Setup
|
||||||
|
|
||||||
|
1. Clone the repository:
|
||||||
|
|
||||||
|
```
|
||||||
|
git clone https://github.com/RedPlanetHQ/core.git
|
||||||
|
cd core
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Configure environment variables in `core/.env`:
|
||||||
|
|
||||||
|
```
|
||||||
|
OPENAI_API_KEY=your_openai_api_key
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Start the service
|
||||||
|
|
||||||
|
```
|
||||||
|
docker-compose up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
Once deployed, you can configure your AI providers (OpenAI, Anthropic) and start building your memory graph.
|
||||||
|
|
||||||
|
👉 [View complete self-hosting guide](https://docs.heysol.ai/self-hosting/docker)
|
||||||
|
|
||||||
|
Note: We tried open-source models like Ollama or GPT OSS but facts generation were not good, we are still figuring out how to improve on that and then will also support OSS models.
|
||||||
|
|
||||||
|
## 🚀 CORE Cloud
|
||||||
|
|
||||||
**Build your unified memory graph in 5 minutes:**
|
**Build your unified memory graph in 5 minutes:**
|
||||||
|
|
||||||
|
Don't want to manage infrastructure? CORE Cloud lets you build your personal memory system instantly - no setup, no servers, just memory that works.
|
||||||
|
|
||||||
1. **Sign Up** at [core.heysol.ai](https://core.heysol.ai) and create your account
|
1. **Sign Up** at [core.heysol.ai](https://core.heysol.ai) and create your account
|
||||||
2. **Add your first memory** - share context about yourself
|
2. **Visualize your memory graph** and see how CORE automatically forms connections between facts
|
||||||
|
3. **Test it out** - ask "What do you know about me?" in conversation section
|
||||||
<img width="2088" height="1212" alt="first-memory" src="https://github.com/user-attachments/assets/ecfab88e-e91a-474d-9ef5-fc6c19b655a8" />
|
4. Connect to your tools:
|
||||||
|
|
||||||
|
|
||||||
3. **Visualize your memory graph** and see how CORE automatically forms connections between facts
|
|
||||||
5. **Test it out** - ask "What do you know about me?" in conversatio section
|
|
||||||
6. Connect to your tools:
|
|
||||||
- [Claude](https://docs.heysol.ai/providers/claude) & [Cursor](https://docs.heysol.ai/providers/cursor) - coding with context
|
- [Claude](https://docs.heysol.ai/providers/claude) & [Cursor](https://docs.heysol.ai/providers/cursor) - coding with context
|
||||||
- [CLaude Code CLI](https://docs.heysol.ai/providers/claude-code) & [Gemini CLI](https://docs.heysol.ai/providers/claude-code) - terminal-based coding with memory
|
- [CLaude Code CLI](https://docs.heysol.ai/providers/claude-code) & [Codex CLI](https://docs.heysol.ai/providers/codex) - terminal-based coding with memory
|
||||||
- [Add Browser Extension](https://docs.heysol.ai/providers/browser-extension) - bring your memory to any website
|
- [Add Browser Extension](https://docs.heysol.ai/providers/browser-extension) - bring your memory to any website
|
||||||
- [Linear](https://docs.heysol.ai/integrations/linear), [Github](https://docs.heysol.ai/integrations/github) - add project context automatically
|
- [Linear](https://docs.heysol.ai/integrations/linear), [Github](https://docs.heysol.ai/integrations/github) - add project context automatically
|
||||||
|
|
||||||
## 🧩 Key Features
|
## 🧩 Key Features
|
||||||
|
|
||||||
### 🧠 **Unified, Portable Memory**:
|
### 🧠 **Unified, Portable Memory**:
|
||||||
|
|
||||||
Add and recall your memory across **Cursor, Windsurf, Claude Desktop, Claude Code, Gemini CLI, AWS's Kiro, VS Code, and Roo Code** via MCP
|
Add and recall your memory across **Cursor, Windsurf, Claude Desktop, Claude Code, Gemini CLI, AWS's Kiro, VS Code, and Roo Code** via MCP
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
|
### 🕸️ **Temporal + Reified Knowledge Graph**:
|
||||||
### 🕸️ **Temporal + Reified Knowledge Graph**:
|
|
||||||
|
|
||||||
Remember the story behind every fact—track who said what, when, and why with rich relationships and full provenance, not just flat storage
|
Remember the story behind every fact—track who said what, when, and why with rich relationships and full provenance, not just flat storage
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
|
### 🌐 **Browser Extension**:
|
||||||
### 🌐 **Browser Extension**:
|
|
||||||
|
|
||||||
Save conversations and content from ChatGPT, Grok, Gemini, Twitter, YouTube, blog posts, and any webpage directly into your CORE memory.
|
Save conversations and content from ChatGPT, Grok, Gemini, Twitter, YouTube, blog posts, and any webpage directly into your CORE memory.
|
||||||
|
|
||||||
**How to Use Extension**
|
**How to Use Extension**
|
||||||
|
|
||||||
1. [Download the Extension](https://chromewebstore.google.com/detail/core-extension/cglndoindnhdbfcbijikibfjoholdjcc) from the Chrome Web Store.
|
1. [Download the Extension](https://chromewebstore.google.com/detail/core-extension/cglndoindnhdbfcbijikibfjoholdjcc) from the Chrome Web Store.
|
||||||
2. Login to [CORE dashboard](https://core.heysol.ai)
|
2. Login to [CORE dashboard](https://core.heysol.ai)
|
||||||
- Navigate to Settings (bottom left)
|
- Navigate to Settings (bottom left)
|
||||||
@ -108,29 +148,26 @@ Save conversations and content from ChatGPT, Grok, Gemini, Twitter, YouTube, blo
|
|||||||
|
|
||||||
https://github.com/user-attachments/assets/6e629834-1b9d-4fe6-ae58-a9068986036a
|
https://github.com/user-attachments/assets/6e629834-1b9d-4fe6-ae58-a9068986036a
|
||||||
|
|
||||||
|
### 💬 **Chat with Memory**:
|
||||||
|
|
||||||
### 💬 **Chat with Memory**:
|
|
||||||
Ask questions like "What are my writing preferences?" with instant insights from your connected knowledge
|
Ask questions like "What are my writing preferences?" with instant insights from your connected knowledge
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
|
### ⚡ **Auto-Sync from Apps**:
|
||||||
### ⚡ **Auto-Sync from Apps**:
|
|
||||||
|
|
||||||
Automatically capture relevant context from Linear, Slack, Notion, GitHub and other connected apps into your CORE memory
|
Automatically capture relevant context from Linear, Slack, Notion, GitHub and other connected apps into your CORE memory
|
||||||
|
|
||||||
|
📖 **[View All Integrations](./integrations/README.md)** - Complete list of supported services and their features
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
|
### 🔗 **MCP Integration Hub**:
|
||||||
### 🔗 **MCP Integration Hub**:
|
|
||||||
|
|
||||||
Connect Linear, Slack, GitHub, Notion once to CORE—then use all their tools in Claude, Cursor, or any MCP client with a single URL
|
Connect Linear, Slack, GitHub, Notion once to CORE—then use all their tools in Claude, Cursor, or any MCP client with a single URL
|
||||||
|
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## How CORE create memory
|
## How CORE create memory
|
||||||
|
|
||||||
<img width="12885" height="3048" alt="memory-ingest-diagram" src="https://github.com/user-attachments/assets/c51679de-8260-4bee-bebf-aff32c6b8e13" />
|
<img width="12885" height="3048" alt="memory-ingest-diagram" src="https://github.com/user-attachments/assets/c51679de-8260-4bee-bebf-aff32c6b8e13" />
|
||||||
@ -144,7 +181,6 @@ CORE’s ingestion pipeline has four phases designed to capture evolving context
|
|||||||
|
|
||||||
The Result: Instead of a flat database, CORE gives you a memory that grows and changes with you - preserving context, evolution, and ownership so agents can actually use it.
|
The Result: Instead of a flat database, CORE gives you a memory that grows and changes with you - preserving context, evolution, and ownership so agents can actually use it.
|
||||||
|
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
## How CORE recalls from memory
|
## How CORE recalls from memory
|
||||||
@ -168,9 +204,11 @@ Explore our documentation to get the most out of CORE
|
|||||||
- [Self Hosting](https://docs.heysol.ai/self-hosting/overview)
|
- [Self Hosting](https://docs.heysol.ai/self-hosting/overview)
|
||||||
- [Connect Core MCP with Claude](https://docs.heysol.ai/providers/claude)
|
- [Connect Core MCP with Claude](https://docs.heysol.ai/providers/claude)
|
||||||
- [Connect Core MCP with Cursor](https://docs.heysol.ai/providers/cursor)
|
- [Connect Core MCP with Cursor](https://docs.heysol.ai/providers/cursor)
|
||||||
|
- [Connect Core MCP with Claude Code](https://docs.heysol.ai/providers/claude-code)
|
||||||
|
- [Connect Core MCP with Codex](https://docs.heysol.ai/providers/codex)
|
||||||
|
|
||||||
- [Basic Concepts](https://docs.heysol.ai/overview)
|
- [Basic Concepts](https://docs.heysol.ai/overview)
|
||||||
- [API Reference](https://docs.heysol.ai/local-setup)
|
- [API Reference](https://docs.heysol.ai/api-reference/get-user-profile)
|
||||||
|
|
||||||
## 🔒 Security
|
## 🔒 Security
|
||||||
|
|
||||||
@ -179,7 +217,7 @@ CORE takes security seriously. We implement industry-standard security practices
|
|||||||
- **Data Encryption**: All data in transit (TLS 1.3) and at rest (AES-256)
|
- **Data Encryption**: All data in transit (TLS 1.3) and at rest (AES-256)
|
||||||
- **Authentication**: OAuth 2.0 and magic link authentication
|
- **Authentication**: OAuth 2.0 and magic link authentication
|
||||||
- **Access Control**: Workspace-based isolation and role-based permissions
|
- **Access Control**: Workspace-based isolation and role-based permissions
|
||||||
- **Vulnerability Reporting**: Please report security issues to harshith@tegon.ai
|
- **Vulnerability Reporting**: Please report security issues to harshith@poozle.dev
|
||||||
|
|
||||||
For detailed security information, see our [Security Policy](SECURITY.md).
|
For detailed security information, see our [Security Policy](SECURITY.md).
|
||||||
|
|
||||||
@ -212,9 +250,11 @@ Have questions or feedback? We're here to help:
|
|||||||
<a href="https://github.com/RedPlanetHQ/core/graphs/contributors">
|
<a href="https://github.com/RedPlanetHQ/core/graphs/contributors">
|
||||||
<img src="https://contrib.rocks/image?repo=RedPlanetHQ/core" />
|
<img src="https://contrib.rocks/image?repo=RedPlanetHQ/core" />
|
||||||
</a>
|
</a>
|
||||||
|
<<<<<<< Updated upstream
|
||||||
|
|
||||||
|
<<<<<<< HEAD
|
||||||
|
|
||||||
|
# =======
|
||||||
|
|
||||||
|
> > > > > > > Stashed changes
|
||||||
|
> > > > > > > 62db6c1 (feat: automatic space identification)
|
||||||
|
|
||||||
|
|||||||
51
apps/init/.gitignore
vendored
51
apps/init/.gitignore
vendored
@ -1,51 +0,0 @@
|
|||||||
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
|
|
||||||
|
|
||||||
# Dependencies
|
|
||||||
node_modules
|
|
||||||
.pnp
|
|
||||||
.pnp.js
|
|
||||||
|
|
||||||
# Local env files
|
|
||||||
.env
|
|
||||||
.env.local
|
|
||||||
.env.development.local
|
|
||||||
.env.test.local
|
|
||||||
.env.production.local
|
|
||||||
|
|
||||||
# Testing
|
|
||||||
coverage
|
|
||||||
|
|
||||||
# Turbo
|
|
||||||
.turbo
|
|
||||||
|
|
||||||
# Vercel
|
|
||||||
.vercel
|
|
||||||
|
|
||||||
# Build Outputs
|
|
||||||
.next/
|
|
||||||
out/
|
|
||||||
build
|
|
||||||
dist
|
|
||||||
.tshy/
|
|
||||||
.tshy-build/
|
|
||||||
|
|
||||||
# Debug
|
|
||||||
npm-debug.log*
|
|
||||||
yarn-debug.log*
|
|
||||||
yarn-error.log*
|
|
||||||
|
|
||||||
# Misc
|
|
||||||
.DS_Store
|
|
||||||
*.pem
|
|
||||||
|
|
||||||
docker-compose.dev.yaml
|
|
||||||
|
|
||||||
clickhouse/
|
|
||||||
.vscode/
|
|
||||||
registry/
|
|
||||||
|
|
||||||
.cursor
|
|
||||||
CLAUDE.md
|
|
||||||
|
|
||||||
.claude
|
|
||||||
|
|
||||||
@ -1,70 +0,0 @@
|
|||||||
ARG NODE_IMAGE=node:20.11.1-bullseye-slim@sha256:5a5a92b3a8d392691c983719dbdc65d9f30085d6dcd65376e7a32e6fe9bf4cbe
|
|
||||||
|
|
||||||
FROM ${NODE_IMAGE} AS pruner
|
|
||||||
|
|
||||||
WORKDIR /core
|
|
||||||
|
|
||||||
COPY --chown=node:node . .
|
|
||||||
RUN npx -q turbo@2.5.3 prune --scope=@redplanethq/init --docker
|
|
||||||
RUN find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
|
|
||||||
|
|
||||||
# Base strategy to have layer caching
|
|
||||||
FROM ${NODE_IMAGE} AS base
|
|
||||||
RUN apt-get update && apt-get install -y openssl dumb-init postgresql-client
|
|
||||||
WORKDIR /core
|
|
||||||
COPY --chown=node:node .gitignore .gitignore
|
|
||||||
COPY --from=pruner --chown=node:node /core/out/json/ .
|
|
||||||
COPY --from=pruner --chown=node:node /core/out/pnpm-lock.yaml ./pnpm-lock.yaml
|
|
||||||
COPY --from=pruner --chown=node:node /core/out/pnpm-workspace.yaml ./pnpm-workspace.yaml
|
|
||||||
|
|
||||||
## Dev deps
|
|
||||||
FROM base AS dev-deps
|
|
||||||
WORKDIR /core
|
|
||||||
# Corepack is used to install pnpm
|
|
||||||
RUN corepack enable
|
|
||||||
ENV NODE_ENV development
|
|
||||||
RUN pnpm install --ignore-scripts --no-frozen-lockfile
|
|
||||||
|
|
||||||
## Production deps
|
|
||||||
FROM base AS production-deps
|
|
||||||
WORKDIR /core
|
|
||||||
# Corepack is used to install pnpm
|
|
||||||
RUN corepack enable
|
|
||||||
ENV NODE_ENV production
|
|
||||||
RUN pnpm install --prod --no-frozen-lockfile
|
|
||||||
|
|
||||||
## Builder (builds the init CLI)
|
|
||||||
FROM base AS builder
|
|
||||||
WORKDIR /core
|
|
||||||
# Corepack is used to install pnpm
|
|
||||||
RUN corepack enable
|
|
||||||
|
|
||||||
COPY --from=pruner --chown=node:node /core/out/full/ .
|
|
||||||
COPY --from=dev-deps --chown=node:node /core/ .
|
|
||||||
COPY --chown=node:node turbo.json turbo.json
|
|
||||||
COPY --chown=node:node .configs/tsconfig.base.json .configs/tsconfig.base.json
|
|
||||||
RUN pnpm run build --filter=@redplanethq/init...
|
|
||||||
|
|
||||||
# Runner
|
|
||||||
FROM ${NODE_IMAGE} AS runner
|
|
||||||
RUN apt-get update && apt-get install -y openssl postgresql-client ca-certificates
|
|
||||||
WORKDIR /core
|
|
||||||
RUN corepack enable
|
|
||||||
ENV NODE_ENV production
|
|
||||||
|
|
||||||
COPY --from=base /usr/bin/dumb-init /usr/bin/dumb-init
|
|
||||||
COPY --from=pruner --chown=node:node /core/out/full/ .
|
|
||||||
COPY --from=production-deps --chown=node:node /core .
|
|
||||||
COPY --from=builder --chown=node:node /core/apps/init/dist ./apps/init/dist
|
|
||||||
|
|
||||||
# Copy the trigger dump file
|
|
||||||
COPY --chown=node:node apps/init/trigger.dump ./apps/init/trigger.dump
|
|
||||||
|
|
||||||
# Copy and set up entrypoint script
|
|
||||||
COPY --chown=node:node apps/init/entrypoint.sh ./apps/init/entrypoint.sh
|
|
||||||
RUN chmod +x ./apps/init/entrypoint.sh
|
|
||||||
|
|
||||||
USER node
|
|
||||||
WORKDIR /core/apps/init
|
|
||||||
ENTRYPOINT ["dumb-init", "--"]
|
|
||||||
CMD ["./entrypoint.sh"]
|
|
||||||
@ -1,197 +0,0 @@
|
|||||||
# Core CLI
|
|
||||||
|
|
||||||
🧠 **CORE - Contextual Observation & Recall Engine**
|
|
||||||
|
|
||||||
A Command-Line Interface for setting up and managing the Core development environment.
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
```bash
|
|
||||||
npm install -g @redplanethq/core
|
|
||||||
```
|
|
||||||
|
|
||||||
## Commands
|
|
||||||
|
|
||||||
### `core init`
|
|
||||||
|
|
||||||
**One-time setup command** - Initializes the Core development environment with full configuration.
|
|
||||||
|
|
||||||
### `core start`
|
|
||||||
|
|
||||||
**Daily usage command** - Starts all Core services (Docker containers).
|
|
||||||
|
|
||||||
### `core stop`
|
|
||||||
|
|
||||||
**Daily usage command** - Stops all Core services (Docker containers).
|
|
||||||
|
|
||||||
## Getting Started
|
|
||||||
|
|
||||||
### Prerequisites
|
|
||||||
|
|
||||||
- **Node.js** (v18.20.0 or higher)
|
|
||||||
- **Docker** and **Docker Compose**
|
|
||||||
- **Git**
|
|
||||||
- **pnpm** package manager
|
|
||||||
|
|
||||||
### Initial Setup
|
|
||||||
|
|
||||||
1. **Clone the Core repository:**
|
|
||||||
```bash
|
|
||||||
git clone https://github.com/redplanethq/core.git
|
|
||||||
cd core
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **Run the initialization command:**
|
|
||||||
```bash
|
|
||||||
core init
|
|
||||||
```
|
|
||||||
|
|
||||||
3. **The CLI will guide you through the complete setup process:**
|
|
||||||
|
|
||||||
#### Step 1: Prerequisites Check
|
|
||||||
- The CLI shows a checklist of required tools
|
|
||||||
- Confirms you're in the Core repository directory
|
|
||||||
- Exits with instructions if prerequisites aren't met
|
|
||||||
|
|
||||||
#### Step 2: Environment Configuration
|
|
||||||
|
|
||||||
- Copies `.env.example` to `.env` in the root directory
|
|
||||||
- Copies `trigger/.env.example` to `trigger/.env`
|
|
||||||
- Skips copying if `.env` files already exist
|
|
||||||
|
|
||||||
#### Step 3: Docker Services Startup
|
|
||||||
|
|
||||||
- Starts main Core services: `docker compose up -d`
|
|
||||||
- Starts Trigger.dev services: `docker compose up -d` (in trigger/ directory)
|
|
||||||
- Shows real-time output with progress indicators
|
|
||||||
|
|
||||||
#### Step 4: Database Health Check
|
|
||||||
|
|
||||||
- Verifies PostgreSQL is running on `localhost:5432`
|
|
||||||
- Retries for up to 60 seconds if needed
|
|
||||||
|
|
||||||
#### Step 5: Trigger.dev Setup (Interactive)
|
|
||||||
|
|
||||||
- **If Trigger.dev is not configured:**
|
|
||||||
|
|
||||||
1. Prompts you to open http://localhost:8030
|
|
||||||
2. Asks you to login to Trigger.dev
|
|
||||||
3. Guides you to create an organization and project
|
|
||||||
4. Collects your Project ID and Secret Key
|
|
||||||
5. Updates `.env` with your Trigger.dev configuration
|
|
||||||
6. Restarts Core services with new configuration
|
|
||||||
|
|
||||||
- **If Trigger.dev is already configured:**
|
|
||||||
- Skips setup and shows "Configuration already exists" message
|
|
||||||
|
|
||||||
#### Step 6: Docker Registry Login
|
|
||||||
|
|
||||||
- Displays docker login command with credentials from `.env`
|
|
||||||
- Waits for you to complete the login process
|
|
||||||
|
|
||||||
#### Step 7: Trigger.dev Task Deployment
|
|
||||||
|
|
||||||
- Automatically runs: `npx trigger.dev@v4-beta login -a http://localhost:8030`
|
|
||||||
- Deploys tasks with: `pnpm trigger:deploy`
|
|
||||||
- Shows manual deployment instructions if automatic deployment fails
|
|
||||||
|
|
||||||
#### Step 8: Setup Complete!
|
|
||||||
|
|
||||||
- Confirms all services are running
|
|
||||||
- Shows service URLs and connection information
|
|
||||||
|
|
||||||
## Daily Usage
|
|
||||||
|
|
||||||
After initial setup, use these commands for daily development:
|
|
||||||
|
|
||||||
### Start Services
|
|
||||||
|
|
||||||
```bash
|
|
||||||
core start
|
|
||||||
```
|
|
||||||
|
|
||||||
Starts all Docker containers for Core development.
|
|
||||||
|
|
||||||
### Stop Services
|
|
||||||
|
|
||||||
```bash
|
|
||||||
core stop
|
|
||||||
```
|
|
||||||
|
|
||||||
Stops all Docker containers.
|
|
||||||
|
|
||||||
## Service URLs
|
|
||||||
|
|
||||||
After setup, these services will be available:
|
|
||||||
|
|
||||||
- **Core Application**: http://localhost:3033
|
|
||||||
- **Trigger.dev**: http://localhost:8030
|
|
||||||
- **PostgreSQL**: localhost:5432
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
### Repository Not Found
|
|
||||||
|
|
||||||
If you run commands outside the Core repository:
|
|
||||||
|
|
||||||
- The CLI will ask you to confirm you're in the Core repository
|
|
||||||
- If not, it provides instructions to clone the repository
|
|
||||||
- Navigate to the Core repository directory before running commands again
|
|
||||||
|
|
||||||
### Docker Issues
|
|
||||||
|
|
||||||
- Ensure Docker is running
|
|
||||||
- Check Docker Compose is installed
|
|
||||||
- Verify you have sufficient system resources
|
|
||||||
|
|
||||||
### Trigger.dev Setup Issues
|
|
||||||
|
|
||||||
- Check container logs: `docker logs trigger-webapp --tail 50`
|
|
||||||
- Ensure you can access http://localhost:8030
|
|
||||||
- Verify your network allows connections to localhost
|
|
||||||
|
|
||||||
### Environment Variables
|
|
||||||
|
|
||||||
The CLI automatically manages these environment variables:
|
|
||||||
|
|
||||||
- `TRIGGER_PROJECT_ID` - Your Trigger.dev project ID
|
|
||||||
- `TRIGGER_SECRET_KEY` - Your Trigger.dev secret key
|
|
||||||
- Docker registry credentials for deployment
|
|
||||||
|
|
||||||
### Manual Trigger.dev Deployment
|
|
||||||
|
|
||||||
If automatic deployment fails, run manually:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
npx trigger.dev@v4-beta login -a http://localhost:8030
|
|
||||||
pnpm trigger:deploy
|
|
||||||
```
|
|
||||||
|
|
||||||
## Development Workflow
|
|
||||||
|
|
||||||
1. **First time setup:** `core init`
|
|
||||||
2. **Daily development:**
|
|
||||||
- `core start` - Start your development environment
|
|
||||||
- Do your development work
|
|
||||||
- `core stop` - Stop services when done
|
|
||||||
|
|
||||||
## Support
|
|
||||||
|
|
||||||
For issues and questions:
|
|
||||||
|
|
||||||
- Check the main Core repository: https://github.com/redplanethq/core
|
|
||||||
- Review Docker container logs for troubleshooting
|
|
||||||
- Ensure all prerequisites are properly installed
|
|
||||||
|
|
||||||
## Features
|
|
||||||
|
|
||||||
- 🚀 **One-command setup** - Complete environment initialization
|
|
||||||
- 🔄 **Smart configuration** - Skips already configured components
|
|
||||||
- 📱 **Real-time feedback** - Live progress indicators and output
|
|
||||||
- 🐳 **Docker integration** - Full container lifecycle management
|
|
||||||
- 🔧 **Interactive setup** - Guided configuration process
|
|
||||||
- 🎯 **Error handling** - Graceful failure with recovery instructions
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Happy coding with Core!** 🎉
|
|
||||||
@ -1,22 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
|
|
||||||
# Exit on any error
|
|
||||||
set -e
|
|
||||||
|
|
||||||
echo "Starting init CLI..."
|
|
||||||
|
|
||||||
# Wait for database to be ready
|
|
||||||
echo "Waiting for database connection..."
|
|
||||||
until pg_isready -h "${DB_HOST:-localhost}" -p "${DB_PORT:-5432}" -U "${POSTGRES_USER:-docker}"; do
|
|
||||||
echo "Database is unavailable - sleeping"
|
|
||||||
sleep 2
|
|
||||||
done
|
|
||||||
|
|
||||||
echo "Database is ready!"
|
|
||||||
|
|
||||||
# Run the init command
|
|
||||||
echo "Running init command..."
|
|
||||||
node ./dist/esm/index.js init
|
|
||||||
|
|
||||||
echo "Init completed successfully!"
|
|
||||||
exit 0
|
|
||||||
@ -1,145 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@redplanethq/init",
|
|
||||||
"version": "0.1.0",
|
|
||||||
"description": "A init service to create trigger instance",
|
|
||||||
"type": "module",
|
|
||||||
"license": "MIT",
|
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://github.com/redplanethq/core",
|
|
||||||
"directory": "apps/init"
|
|
||||||
},
|
|
||||||
"publishConfig": {
|
|
||||||
"access": "public"
|
|
||||||
},
|
|
||||||
"keywords": [
|
|
||||||
"typescript"
|
|
||||||
],
|
|
||||||
"files": [
|
|
||||||
"dist",
|
|
||||||
"trigger.dump"
|
|
||||||
],
|
|
||||||
"bin": {
|
|
||||||
"core": "./dist/esm/index.js"
|
|
||||||
},
|
|
||||||
"tshy": {
|
|
||||||
"selfLink": false,
|
|
||||||
"main": false,
|
|
||||||
"module": false,
|
|
||||||
"dialects": [
|
|
||||||
"esm"
|
|
||||||
],
|
|
||||||
"project": "./tsconfig.json",
|
|
||||||
"exclude": [
|
|
||||||
"**/*.test.ts"
|
|
||||||
],
|
|
||||||
"exports": {
|
|
||||||
"./package.json": "./package.json",
|
|
||||||
".": "./src/index.ts"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@epic-web/test-server": "^0.1.0",
|
|
||||||
"@types/gradient-string": "^1.1.2",
|
|
||||||
"@types/ini": "^4.1.1",
|
|
||||||
"@types/object-hash": "3.0.6",
|
|
||||||
"@types/polka": "^0.5.7",
|
|
||||||
"@types/react": "^18.2.48",
|
|
||||||
"@types/resolve": "^1.20.6",
|
|
||||||
"@types/rimraf": "^4.0.5",
|
|
||||||
"@types/semver": "^7.5.0",
|
|
||||||
"@types/source-map-support": "0.5.10",
|
|
||||||
"@types/ws": "^8.5.3",
|
|
||||||
"cpy-cli": "^5.0.0",
|
|
||||||
"execa": "^8.0.1",
|
|
||||||
"find-up": "^7.0.0",
|
|
||||||
"rimraf": "^5.0.7",
|
|
||||||
"ts-essentials": "10.0.1",
|
|
||||||
"tshy": "^3.0.2",
|
|
||||||
"tsx": "4.17.0"
|
|
||||||
},
|
|
||||||
"scripts": {
|
|
||||||
"clean": "rimraf dist .tshy .tshy-build .turbo",
|
|
||||||
"typecheck": "tsc -p tsconfig.src.json --noEmit",
|
|
||||||
"build": "tshy",
|
|
||||||
"test": "vitest",
|
|
||||||
"test:e2e": "vitest --run -c ./e2e/vitest.config.ts"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"@clack/prompts": "^0.10.0",
|
|
||||||
"@depot/cli": "0.0.1-cli.2.80.0",
|
|
||||||
"@opentelemetry/api": "1.9.0",
|
|
||||||
"@opentelemetry/api-logs": "0.52.1",
|
|
||||||
"@opentelemetry/exporter-logs-otlp-http": "0.52.1",
|
|
||||||
"@opentelemetry/exporter-trace-otlp-http": "0.52.1",
|
|
||||||
"@opentelemetry/instrumentation": "0.52.1",
|
|
||||||
"@opentelemetry/instrumentation-fetch": "0.52.1",
|
|
||||||
"@opentelemetry/resources": "1.25.1",
|
|
||||||
"@opentelemetry/sdk-logs": "0.52.1",
|
|
||||||
"@opentelemetry/sdk-node": "0.52.1",
|
|
||||||
"@opentelemetry/sdk-trace-base": "1.25.1",
|
|
||||||
"@opentelemetry/sdk-trace-node": "1.25.1",
|
|
||||||
"@opentelemetry/semantic-conventions": "1.25.1",
|
|
||||||
"ansi-escapes": "^7.0.0",
|
|
||||||
"braces": "^3.0.3",
|
|
||||||
"c12": "^1.11.1",
|
|
||||||
"chalk": "^5.2.0",
|
|
||||||
"chokidar": "^3.6.0",
|
|
||||||
"cli-table3": "^0.6.3",
|
|
||||||
"commander": "^9.4.1",
|
|
||||||
"defu": "^6.1.4",
|
|
||||||
"dotenv": "^16.4.5",
|
|
||||||
"dotenv-expand": "^12.0.2",
|
|
||||||
"esbuild": "^0.23.0",
|
|
||||||
"eventsource": "^3.0.2",
|
|
||||||
"evt": "^2.4.13",
|
|
||||||
"fast-npm-meta": "^0.2.2",
|
|
||||||
"git-last-commit": "^1.0.1",
|
|
||||||
"gradient-string": "^2.0.2",
|
|
||||||
"has-flag": "^5.0.1",
|
|
||||||
"import-in-the-middle": "1.11.0",
|
|
||||||
"import-meta-resolve": "^4.1.0",
|
|
||||||
"ini": "^5.0.0",
|
|
||||||
"jsonc-parser": "3.2.1",
|
|
||||||
"magicast": "^0.3.4",
|
|
||||||
"minimatch": "^10.0.1",
|
|
||||||
"mlly": "^1.7.1",
|
|
||||||
"nypm": "^0.5.4",
|
|
||||||
"nanoid": "3.3.8",
|
|
||||||
"object-hash": "^3.0.0",
|
|
||||||
"open": "^10.0.3",
|
|
||||||
"knex": "3.1.0",
|
|
||||||
"p-limit": "^6.2.0",
|
|
||||||
"p-retry": "^6.1.0",
|
|
||||||
"partysocket": "^1.0.2",
|
|
||||||
"pkg-types": "^1.1.3",
|
|
||||||
"polka": "^0.5.2",
|
|
||||||
"pg": "8.16.3",
|
|
||||||
"resolve": "^1.22.8",
|
|
||||||
"semver": "^7.5.0",
|
|
||||||
"signal-exit": "^4.1.0",
|
|
||||||
"source-map-support": "0.5.21",
|
|
||||||
"std-env": "^3.7.0",
|
|
||||||
"supports-color": "^10.0.0",
|
|
||||||
"tiny-invariant": "^1.2.0",
|
|
||||||
"tinyexec": "^0.3.1",
|
|
||||||
"tinyglobby": "^0.2.10",
|
|
||||||
"uuid": "11.1.0",
|
|
||||||
"ws": "^8.18.0",
|
|
||||||
"xdg-app-paths": "^8.3.0",
|
|
||||||
"zod": "3.23.8",
|
|
||||||
"zod-validation-error": "^1.5.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=18.20.0"
|
|
||||||
},
|
|
||||||
"exports": {
|
|
||||||
"./package.json": "./package.json",
|
|
||||||
".": {
|
|
||||||
"import": {
|
|
||||||
"types": "./dist/esm/index.d.ts",
|
|
||||||
"default": "./dist/esm/index.js"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,14 +0,0 @@
|
|||||||
import { Command } from "commander";
|
|
||||||
import { initCommand } from "../commands/init.js";
|
|
||||||
import { VERSION } from "./version.js";
|
|
||||||
|
|
||||||
const program = new Command();
|
|
||||||
|
|
||||||
program.name("core").description("Core CLI - A Command-Line Interface for Core").version(VERSION);
|
|
||||||
|
|
||||||
program
|
|
||||||
.command("init")
|
|
||||||
.description("Initialize Core development environment (run once)")
|
|
||||||
.action(initCommand);
|
|
||||||
|
|
||||||
program.parse(process.argv);
|
|
||||||
@ -1,3 +0,0 @@
|
|||||||
import { env } from "../utils/env.js";
|
|
||||||
|
|
||||||
export const VERSION = env.VERSION;
|
|
||||||
@ -1,36 +0,0 @@
|
|||||||
import { intro, outro, note } from "@clack/prompts";
|
|
||||||
import { printCoreBrainLogo } from "../utils/ascii.js";
|
|
||||||
import { initTriggerDatabase, updateWorkerImage } from "../utils/trigger.js";
|
|
||||||
|
|
||||||
export async function initCommand() {
|
|
||||||
// Display the CORE brain logo
|
|
||||||
printCoreBrainLogo();
|
|
||||||
|
|
||||||
intro("🚀 Core Development Environment Setup");
|
|
||||||
|
|
||||||
try {
|
|
||||||
await initTriggerDatabase();
|
|
||||||
await updateWorkerImage();
|
|
||||||
|
|
||||||
note(
|
|
||||||
[
|
|
||||||
"Your services will start running:",
|
|
||||||
"",
|
|
||||||
"• Core Application: http://localhost:3033",
|
|
||||||
"• Trigger.dev: http://localhost:8030",
|
|
||||||
"• PostgreSQL: localhost:5432",
|
|
||||||
"",
|
|
||||||
"You can now start developing with Core!",
|
|
||||||
"",
|
|
||||||
"ℹ️ When logging in to the Core Application, you can find the login URL in the Docker container logs:",
|
|
||||||
" docker logs core-app --tail 50",
|
|
||||||
].join("\n"),
|
|
||||||
"🚀 Services Running"
|
|
||||||
);
|
|
||||||
outro("🎉 Setup Complete!");
|
|
||||||
process.exit(0);
|
|
||||||
} catch (error: any) {
|
|
||||||
outro(`❌ Setup failed: ${error.message}`);
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,3 +0,0 @@
|
|||||||
#!/usr/bin/env node
|
|
||||||
|
|
||||||
import "./cli/index.js";
|
|
||||||
@ -1,29 +0,0 @@
|
|||||||
import chalk from "chalk";
|
|
||||||
import { VERSION } from "../cli/version.js";
|
|
||||||
|
|
||||||
export function printCoreBrainLogo(): void {
|
|
||||||
const brain = `
|
|
||||||
██████╗ ██████╗ ██████╗ ███████╗
|
|
||||||
██╔════╝██╔═══██╗██╔══██╗██╔════╝
|
|
||||||
██║ ██║ ██║██████╔╝█████╗
|
|
||||||
██║ ██║ ██║██╔══██╗██╔══╝
|
|
||||||
╚██████╗╚██████╔╝██║ ██║███████╗
|
|
||||||
╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚══════╝
|
|
||||||
|
|
||||||
o o o
|
|
||||||
o o---o---o o
|
|
||||||
o---o o o---o---o
|
|
||||||
o o---o---o---o o
|
|
||||||
o---o o o---o---o
|
|
||||||
o o---o---o o
|
|
||||||
o o o
|
|
||||||
|
|
||||||
`;
|
|
||||||
|
|
||||||
console.log(chalk.cyan(brain));
|
|
||||||
console.log(
|
|
||||||
chalk.bold.white(
|
|
||||||
` 🧠 CORE - Contextual Observation & Recall Engine ${VERSION ? chalk.gray(`(${VERSION})`) : ""}\n`
|
|
||||||
)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@ -1,24 +0,0 @@
|
|||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
const EnvironmentSchema = z.object({
|
|
||||||
// Version
|
|
||||||
VERSION: z.string().default("0.1.14"),
|
|
||||||
|
|
||||||
// Database
|
|
||||||
DB_HOST: z.string().default("localhost"),
|
|
||||||
DB_PORT: z.string().default("5432"),
|
|
||||||
TRIGGER_DB: z.string().default("trigger"),
|
|
||||||
POSTGRES_USER: z.string().default("docker"),
|
|
||||||
POSTGRES_PASSWORD: z.string().default("docker"),
|
|
||||||
|
|
||||||
// Trigger database
|
|
||||||
TRIGGER_TASKS_IMAGE: z.string().default("redplanethq/proj_core:latest"),
|
|
||||||
|
|
||||||
// Node environment
|
|
||||||
NODE_ENV: z
|
|
||||||
.union([z.literal("development"), z.literal("production"), z.literal("test")])
|
|
||||||
.default("development"),
|
|
||||||
});
|
|
||||||
|
|
||||||
export type Environment = z.infer<typeof EnvironmentSchema>;
|
|
||||||
export const env = EnvironmentSchema.parse(process.env);
|
|
||||||
@ -1,182 +0,0 @@
|
|||||||
import Knex from "knex";
|
|
||||||
import path from "path";
|
|
||||||
import { fileURLToPath } from "url";
|
|
||||||
import { env } from "./env.js";
|
|
||||||
import { spinner, note, log } from "@clack/prompts";
|
|
||||||
|
|
||||||
const __filename = fileURLToPath(import.meta.url);
|
|
||||||
const __dirname = path.dirname(__filename);
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns a PostgreSQL database URL for the given database name.
|
|
||||||
* Throws if required environment variables are missing.
|
|
||||||
*/
|
|
||||||
export function getDatabaseUrl(dbName: string): string {
|
|
||||||
const { POSTGRES_USER, POSTGRES_PASSWORD, DB_HOST, DB_PORT } = env;
|
|
||||||
|
|
||||||
if (!POSTGRES_USER || !POSTGRES_PASSWORD || !DB_HOST || !DB_PORT || !dbName) {
|
|
||||||
throw new Error(
|
|
||||||
"One or more required environment variables are missing: POSTGRES_USER, POSTGRES_PASSWORD, DB_HOST, DB_PORT, dbName"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return `postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${DB_HOST}:${DB_PORT}/${dbName}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Checks if the database specified by TRIGGER_DB exists, and creates it if it does not.
|
|
||||||
* Returns { exists: boolean, created: boolean } - exists indicates success, created indicates if database was newly created.
|
|
||||||
*/
|
|
||||||
export async function ensureDatabaseExists(): Promise<{ exists: boolean; created: boolean }> {
|
|
||||||
const { TRIGGER_DB } = env;
|
|
||||||
|
|
||||||
if (!TRIGGER_DB) {
|
|
||||||
throw new Error("TRIGGER_DB environment variable is missing");
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build a connection string to the default 'postgres' database
|
|
||||||
const adminDbUrl = getDatabaseUrl("postgres");
|
|
||||||
|
|
||||||
// Create a Knex instance for the admin connection
|
|
||||||
const adminKnex = Knex({
|
|
||||||
client: "pg",
|
|
||||||
connection: adminDbUrl,
|
|
||||||
});
|
|
||||||
|
|
||||||
const s = spinner();
|
|
||||||
s.start("Checking for Trigger.dev database...");
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Check if the database exists
|
|
||||||
const result = await adminKnex.select(1).from("pg_database").where("datname", TRIGGER_DB);
|
|
||||||
|
|
||||||
if (result.length === 0) {
|
|
||||||
s.message("Database not found. Creating...");
|
|
||||||
// Database does not exist, create it
|
|
||||||
await adminKnex.raw(`CREATE DATABASE "${TRIGGER_DB}"`);
|
|
||||||
s.stop("Database created.");
|
|
||||||
return { exists: true, created: true };
|
|
||||||
} else {
|
|
||||||
s.stop("Database exists.");
|
|
||||||
return { exists: true, created: false };
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
s.stop("Failed to ensure database exists.");
|
|
||||||
log.warning("Failed to ensure database exists: " + (err as Error).message);
|
|
||||||
return { exists: false, created: false };
|
|
||||||
} finally {
|
|
||||||
await adminKnex.destroy();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Main initialization function
|
|
||||||
export async function initTriggerDatabase() {
|
|
||||||
const { TRIGGER_DB } = env;
|
|
||||||
|
|
||||||
if (!TRIGGER_DB) {
|
|
||||||
throw new Error("TRIGGER_DB environment variable is missing");
|
|
||||||
}
|
|
||||||
|
|
||||||
// Ensure the database exists
|
|
||||||
const { exists, created } = await ensureDatabaseExists();
|
|
||||||
if (!exists) {
|
|
||||||
throw new Error("Failed to create or verify database exists");
|
|
||||||
}
|
|
||||||
|
|
||||||
// Only run pg_restore if the database was newly created
|
|
||||||
if (!created) {
|
|
||||||
note("Database already exists, skipping restore from trigger.dump");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Run pg_restore with the trigger.dump file
|
|
||||||
const dumpFilePath = path.join(__dirname, "../../../trigger.dump");
|
|
||||||
const connectionString = getDatabaseUrl(TRIGGER_DB);
|
|
||||||
|
|
||||||
const s = spinner();
|
|
||||||
s.start("Restoring database from trigger.dump...");
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Use execSync and capture stdout/stderr, send to spinner.log
|
|
||||||
const { spawn } = await import("child_process");
|
|
||||||
await new Promise<void>((resolve, reject) => {
|
|
||||||
const child = spawn(
|
|
||||||
"pg_restore",
|
|
||||||
["--verbose", "--no-acl", "--no-owner", "-d", connectionString, dumpFilePath],
|
|
||||||
{ stdio: ["ignore", "pipe", "pipe"] }
|
|
||||||
);
|
|
||||||
|
|
||||||
child.stdout.on("data", (data) => {
|
|
||||||
s.message(data.toString());
|
|
||||||
});
|
|
||||||
|
|
||||||
child.stderr.on("data", (data) => {
|
|
||||||
s.message(data.toString());
|
|
||||||
});
|
|
||||||
|
|
||||||
child.on("close", (code) => {
|
|
||||||
if (code === 0) {
|
|
||||||
s.stop("Database restored successfully from trigger.dump");
|
|
||||||
resolve();
|
|
||||||
} else {
|
|
||||||
s.stop("Failed to restore database.");
|
|
||||||
log.warning(`Failed to restore database: pg_restore exited with code ${code}`);
|
|
||||||
reject(new Error(`Database restore failed: pg_restore exited with code ${code}`));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
child.on("error", (err) => {
|
|
||||||
s.stop("Failed to restore database.");
|
|
||||||
log.warning("Failed to restore database: " + err.message);
|
|
||||||
reject(new Error(`Database restore failed: ${err.message}`));
|
|
||||||
});
|
|
||||||
});
|
|
||||||
} catch (error: any) {
|
|
||||||
s.stop("Failed to restore database.");
|
|
||||||
log.warning("Failed to restore database: " + error.message);
|
|
||||||
throw new Error(`Database restore failed: ${error.message}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function updateWorkerImage() {
|
|
||||||
const { TRIGGER_DB, TRIGGER_TASKS_IMAGE } = env;
|
|
||||||
|
|
||||||
if (!TRIGGER_DB) {
|
|
||||||
throw new Error("TRIGGER_DB environment variable is missing");
|
|
||||||
}
|
|
||||||
|
|
||||||
const connectionString = getDatabaseUrl(TRIGGER_DB);
|
|
||||||
|
|
||||||
const knex = Knex({
|
|
||||||
client: "pg",
|
|
||||||
connection: connectionString,
|
|
||||||
});
|
|
||||||
|
|
||||||
const s = spinner();
|
|
||||||
s.start("Updating worker image reference...");
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Get the first record from WorkerDeployment table
|
|
||||||
const firstWorkerDeployment = await knex("WorkerDeployment").select("id").first();
|
|
||||||
|
|
||||||
if (!firstWorkerDeployment) {
|
|
||||||
s.stop("No WorkerDeployment records found, skipping image update");
|
|
||||||
note("No WorkerDeployment records found, skipping image update");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update the imageReference column with the TRIGGER_TASKS_IMAGE value
|
|
||||||
await knex("WorkerDeployment").where("id", firstWorkerDeployment.id).update({
|
|
||||||
imageReference: TRIGGER_TASKS_IMAGE,
|
|
||||||
updatedAt: new Date(),
|
|
||||||
});
|
|
||||||
|
|
||||||
s.stop(`Successfully updated worker image reference to: ${TRIGGER_TASKS_IMAGE}`);
|
|
||||||
} catch (error: any) {
|
|
||||||
s.stop("Failed to update worker image.");
|
|
||||||
log.warning("Failed to update worker image: " + error.message);
|
|
||||||
throw new Error(`Worker image update failed: ${error.message}`);
|
|
||||||
} finally {
|
|
||||||
await knex.destroy();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Binary file not shown.
@ -1,40 +0,0 @@
|
|||||||
{
|
|
||||||
"include": ["./src/**/*.ts"],
|
|
||||||
"exclude": ["./src/**/*.test.ts"],
|
|
||||||
"compilerOptions": {
|
|
||||||
"target": "es2022",
|
|
||||||
"lib": ["ES2022", "DOM", "DOM.Iterable", "DOM.AsyncIterable"],
|
|
||||||
"module": "NodeNext",
|
|
||||||
"moduleResolution": "NodeNext",
|
|
||||||
"moduleDetection": "force",
|
|
||||||
"verbatimModuleSyntax": false,
|
|
||||||
"jsx": "react",
|
|
||||||
|
|
||||||
"strict": true,
|
|
||||||
"alwaysStrict": true,
|
|
||||||
"strictPropertyInitialization": true,
|
|
||||||
"skipLibCheck": true,
|
|
||||||
"forceConsistentCasingInFileNames": true,
|
|
||||||
"noUnusedLocals": false,
|
|
||||||
"noUnusedParameters": false,
|
|
||||||
"noImplicitAny": true,
|
|
||||||
"noImplicitReturns": true,
|
|
||||||
"noImplicitThis": true,
|
|
||||||
|
|
||||||
"noFallthroughCasesInSwitch": true,
|
|
||||||
"resolveJsonModule": true,
|
|
||||||
|
|
||||||
"removeComments": false,
|
|
||||||
"esModuleInterop": true,
|
|
||||||
"emitDecoratorMetadata": false,
|
|
||||||
"experimentalDecorators": false,
|
|
||||||
"downlevelIteration": true,
|
|
||||||
"isolatedModules": true,
|
|
||||||
"noUncheckedIndexedAccess": true,
|
|
||||||
|
|
||||||
"pretty": true,
|
|
||||||
"isolatedDeclarations": false,
|
|
||||||
"composite": true,
|
|
||||||
"sourceMap": true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,8 +0,0 @@
|
|||||||
import { configDefaults, defineConfig } from "vitest/config";
|
|
||||||
|
|
||||||
export default defineConfig({
|
|
||||||
test: {
|
|
||||||
globals: true,
|
|
||||||
exclude: [...configDefaults.exclude, "e2e/**/*"],
|
|
||||||
},
|
|
||||||
});
|
|
||||||
50
apps/webapp/app/bullmq/connection.ts
Normal file
50
apps/webapp/app/bullmq/connection.ts
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
import Redis, { type RedisOptions } from "ioredis";
|
||||||
|
|
||||||
|
let redisConnection: Redis | null = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get or create a Redis connection for BullMQ
|
||||||
|
* This connection is shared across all queues and workers
|
||||||
|
*/
|
||||||
|
export function getRedisConnection() {
|
||||||
|
if (redisConnection) {
|
||||||
|
return redisConnection;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Dynamically import ioredis only when needed
|
||||||
|
|
||||||
|
const redisConfig: RedisOptions = {
|
||||||
|
host: process.env.REDIS_HOST,
|
||||||
|
port: parseInt(process.env.REDIS_PORT as string),
|
||||||
|
password: process.env.REDIS_PASSWORD,
|
||||||
|
maxRetriesPerRequest: null, // Required for BullMQ
|
||||||
|
enableReadyCheck: false, // Required for BullMQ
|
||||||
|
};
|
||||||
|
|
||||||
|
// Add TLS configuration if not disabled
|
||||||
|
if (!process.env.REDIS_TLS_DISABLED) {
|
||||||
|
redisConfig.tls = {};
|
||||||
|
}
|
||||||
|
|
||||||
|
redisConnection = new Redis(redisConfig);
|
||||||
|
|
||||||
|
redisConnection.on("error", (error) => {
|
||||||
|
console.error("Redis connection error:", error);
|
||||||
|
});
|
||||||
|
|
||||||
|
redisConnection.on("connect", () => {
|
||||||
|
console.log("Redis connected successfully");
|
||||||
|
});
|
||||||
|
|
||||||
|
return redisConnection;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Close the Redis connection (useful for graceful shutdown)
|
||||||
|
*/
|
||||||
|
export async function closeRedisConnection(): Promise<void> {
|
||||||
|
if (redisConnection) {
|
||||||
|
await redisConnection.quit();
|
||||||
|
redisConnection = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
160
apps/webapp/app/bullmq/queues/index.ts
Normal file
160
apps/webapp/app/bullmq/queues/index.ts
Normal file
@ -0,0 +1,160 @@
|
|||||||
|
/**
|
||||||
|
* BullMQ Queues
|
||||||
|
*
|
||||||
|
* All queue definitions for the BullMQ implementation
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Queue } from "bullmq";
|
||||||
|
import { getRedisConnection } from "../connection";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Episode ingestion queue
|
||||||
|
* Handles individual episode ingestion (including document chunks)
|
||||||
|
*/
|
||||||
|
export const ingestQueue = new Queue("ingest-queue", {
|
||||||
|
connection: getRedisConnection(),
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 3,
|
||||||
|
backoff: {
|
||||||
|
type: "exponential",
|
||||||
|
delay: 2000,
|
||||||
|
},
|
||||||
|
removeOnComplete: {
|
||||||
|
age: 3600, // Keep completed jobs for 1 hour
|
||||||
|
count: 1000, // Keep last 1000 completed jobs
|
||||||
|
},
|
||||||
|
removeOnFail: {
|
||||||
|
age: 86400, // Keep failed jobs for 24 hours
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Document ingestion queue
|
||||||
|
* Handles document-level ingestion with differential processing
|
||||||
|
*/
|
||||||
|
export const documentIngestQueue = new Queue("document-ingest-queue", {
|
||||||
|
connection: getRedisConnection(),
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 3,
|
||||||
|
backoff: {
|
||||||
|
type: "exponential",
|
||||||
|
delay: 2000,
|
||||||
|
},
|
||||||
|
removeOnComplete: {
|
||||||
|
age: 3600,
|
||||||
|
count: 1000,
|
||||||
|
},
|
||||||
|
removeOnFail: {
|
||||||
|
age: 86400,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Conversation title creation queue
|
||||||
|
*/
|
||||||
|
export const conversationTitleQueue = new Queue("conversation-title-queue", {
|
||||||
|
connection: getRedisConnection(),
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 3,
|
||||||
|
backoff: {
|
||||||
|
type: "exponential",
|
||||||
|
delay: 2000,
|
||||||
|
},
|
||||||
|
removeOnComplete: {
|
||||||
|
age: 3600,
|
||||||
|
count: 1000,
|
||||||
|
},
|
||||||
|
removeOnFail: {
|
||||||
|
age: 86400,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session compaction queue
|
||||||
|
*/
|
||||||
|
export const sessionCompactionQueue = new Queue("session-compaction-queue", {
|
||||||
|
connection: getRedisConnection(),
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 3,
|
||||||
|
backoff: {
|
||||||
|
type: "exponential",
|
||||||
|
delay: 2000,
|
||||||
|
},
|
||||||
|
removeOnComplete: {
|
||||||
|
age: 3600,
|
||||||
|
count: 1000,
|
||||||
|
},
|
||||||
|
removeOnFail: {
|
||||||
|
age: 86400,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* BERT topic analysis queue
|
||||||
|
* Handles CPU-intensive topic modeling on user episodes
|
||||||
|
*/
|
||||||
|
export const bertTopicQueue = new Queue("bert-topic-queue", {
|
||||||
|
connection: getRedisConnection(),
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 2, // Only 2 attempts due to long runtime
|
||||||
|
backoff: {
|
||||||
|
type: "exponential",
|
||||||
|
delay: 5000,
|
||||||
|
},
|
||||||
|
removeOnComplete: {
|
||||||
|
age: 7200, // Keep completed jobs for 2 hours
|
||||||
|
count: 100,
|
||||||
|
},
|
||||||
|
removeOnFail: {
|
||||||
|
age: 172800, // Keep failed jobs for 48 hours (for debugging)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Space assignment queue
|
||||||
|
* Handles assigning episodes to spaces based on semantic matching
|
||||||
|
*/
|
||||||
|
export const spaceAssignmentQueue = new Queue("space-assignment-queue", {
|
||||||
|
connection: getRedisConnection(),
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 3,
|
||||||
|
backoff: {
|
||||||
|
type: "exponential",
|
||||||
|
delay: 2000,
|
||||||
|
},
|
||||||
|
removeOnComplete: {
|
||||||
|
age: 3600,
|
||||||
|
count: 1000,
|
||||||
|
},
|
||||||
|
removeOnFail: {
|
||||||
|
age: 86400,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Space summary queue
|
||||||
|
* Handles generating summaries for spaces
|
||||||
|
*/
|
||||||
|
export const spaceSummaryQueue = new Queue("space-summary-queue", {
|
||||||
|
connection: getRedisConnection(),
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 3,
|
||||||
|
backoff: {
|
||||||
|
type: "exponential",
|
||||||
|
delay: 2000,
|
||||||
|
},
|
||||||
|
removeOnComplete: {
|
||||||
|
age: 3600,
|
||||||
|
count: 1000,
|
||||||
|
},
|
||||||
|
removeOnFail: {
|
||||||
|
age: 86400,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
154
apps/webapp/app/bullmq/start-workers.ts
Normal file
154
apps/webapp/app/bullmq/start-workers.ts
Normal file
@ -0,0 +1,154 @@
|
|||||||
|
/**
|
||||||
|
* BullMQ Worker Startup Script
|
||||||
|
*
|
||||||
|
* This script starts all BullMQ workers for processing background jobs.
|
||||||
|
* Run this as a separate process alongside your main application.
|
||||||
|
*
|
||||||
|
* Usage:
|
||||||
|
* tsx apps/webapp/app/bullmq/start-workers.ts
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { logger } from "~/services/logger.service";
|
||||||
|
import {
|
||||||
|
ingestWorker,
|
||||||
|
documentIngestWorker,
|
||||||
|
conversationTitleWorker,
|
||||||
|
sessionCompactionWorker,
|
||||||
|
closeAllWorkers,
|
||||||
|
bertTopicWorker,
|
||||||
|
spaceAssignmentWorker,
|
||||||
|
spaceSummaryWorker,
|
||||||
|
} from "./workers";
|
||||||
|
import {
|
||||||
|
ingestQueue,
|
||||||
|
documentIngestQueue,
|
||||||
|
conversationTitleQueue,
|
||||||
|
sessionCompactionQueue,
|
||||||
|
bertTopicQueue,
|
||||||
|
spaceAssignmentQueue,
|
||||||
|
spaceSummaryQueue,
|
||||||
|
} from "./queues";
|
||||||
|
import {
|
||||||
|
setupWorkerLogging,
|
||||||
|
startPeriodicMetricsLogging,
|
||||||
|
} from "./utils/worker-logger";
|
||||||
|
|
||||||
|
let metricsInterval: NodeJS.Timeout | null = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize and start all BullMQ workers with comprehensive logging
|
||||||
|
*/
|
||||||
|
export async function initWorkers(): Promise<void> {
|
||||||
|
// Setup comprehensive logging for all workers
|
||||||
|
setupWorkerLogging(ingestWorker, ingestQueue, "ingest-episode");
|
||||||
|
setupWorkerLogging(
|
||||||
|
documentIngestWorker,
|
||||||
|
documentIngestQueue,
|
||||||
|
"ingest-document",
|
||||||
|
);
|
||||||
|
setupWorkerLogging(
|
||||||
|
conversationTitleWorker,
|
||||||
|
conversationTitleQueue,
|
||||||
|
"conversation-title",
|
||||||
|
);
|
||||||
|
|
||||||
|
setupWorkerLogging(
|
||||||
|
sessionCompactionWorker,
|
||||||
|
sessionCompactionQueue,
|
||||||
|
"session-compaction",
|
||||||
|
);
|
||||||
|
|
||||||
|
setupWorkerLogging(bertTopicWorker, bertTopicQueue, "bert-topic");
|
||||||
|
|
||||||
|
setupWorkerLogging(
|
||||||
|
spaceAssignmentWorker,
|
||||||
|
spaceAssignmentQueue,
|
||||||
|
"space-assignment",
|
||||||
|
);
|
||||||
|
|
||||||
|
setupWorkerLogging(spaceSummaryWorker, spaceSummaryQueue, "space-summary");
|
||||||
|
|
||||||
|
// Start periodic metrics logging (every 60 seconds)
|
||||||
|
metricsInterval = startPeriodicMetricsLogging(
|
||||||
|
[
|
||||||
|
{ worker: ingestWorker, queue: ingestQueue, name: "ingest-episode" },
|
||||||
|
{
|
||||||
|
worker: documentIngestWorker,
|
||||||
|
queue: documentIngestQueue,
|
||||||
|
name: "ingest-document",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
worker: conversationTitleWorker,
|
||||||
|
queue: conversationTitleQueue,
|
||||||
|
name: "conversation-title",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
worker: sessionCompactionWorker,
|
||||||
|
queue: sessionCompactionQueue,
|
||||||
|
name: "session-compaction",
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
worker: bertTopicWorker,
|
||||||
|
queue: bertTopicQueue,
|
||||||
|
name: "bert-topic",
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
worker: spaceAssignmentWorker,
|
||||||
|
queue: spaceAssignmentQueue,
|
||||||
|
name: "space-assignment",
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
worker: spaceSummaryWorker,
|
||||||
|
queue: spaceAssignmentQueue,
|
||||||
|
name: "space-summary",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
60000, // Log metrics every 60 seconds
|
||||||
|
);
|
||||||
|
|
||||||
|
// Log worker startup
|
||||||
|
logger.log("\n🚀 Starting BullMQ workers...");
|
||||||
|
logger.log("─".repeat(80));
|
||||||
|
logger.log(`✓ Ingest worker: ${ingestWorker.name} (concurrency: 5)`);
|
||||||
|
logger.log(
|
||||||
|
`✓ Document ingest worker: ${documentIngestWorker.name} (concurrency: 3)`,
|
||||||
|
);
|
||||||
|
logger.log(
|
||||||
|
`✓ Conversation title worker: ${conversationTitleWorker.name} (concurrency: 10)`,
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.log(
|
||||||
|
`✓ Session compaction worker: ${sessionCompactionWorker.name} (concurrency: 3)`,
|
||||||
|
);
|
||||||
|
logger.log("─".repeat(80));
|
||||||
|
logger.log("✅ All BullMQ workers started and listening for jobs");
|
||||||
|
logger.log("📊 Metrics will be logged every 60 seconds\n");
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Shutdown all workers gracefully
|
||||||
|
*/
|
||||||
|
export async function shutdownWorkers(): Promise<void> {
|
||||||
|
logger.log("Shutdown signal received, closing workers gracefully...");
|
||||||
|
if (metricsInterval) {
|
||||||
|
clearInterval(metricsInterval);
|
||||||
|
}
|
||||||
|
await closeAllWorkers();
|
||||||
|
}
|
||||||
|
|
||||||
|
// If running as standalone script, initialize workers
|
||||||
|
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||||
|
initWorkers();
|
||||||
|
|
||||||
|
// Handle graceful shutdown
|
||||||
|
const shutdown = async () => {
|
||||||
|
await shutdownWorkers();
|
||||||
|
process.exit(0);
|
||||||
|
};
|
||||||
|
|
||||||
|
process.on("SIGTERM", shutdown);
|
||||||
|
process.on("SIGINT", shutdown);
|
||||||
|
}
|
||||||
132
apps/webapp/app/bullmq/utils/job-finder.ts
Normal file
132
apps/webapp/app/bullmq/utils/job-finder.ts
Normal file
@ -0,0 +1,132 @@
|
|||||||
|
/**
|
||||||
|
* BullMQ Job Finder Utilities
|
||||||
|
*
|
||||||
|
* Helper functions to find, retrieve, and cancel BullMQ jobs
|
||||||
|
*/
|
||||||
|
|
||||||
|
interface JobInfo {
|
||||||
|
id: string;
|
||||||
|
isCompleted: boolean;
|
||||||
|
status?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all active queues
|
||||||
|
*/
|
||||||
|
async function getAllQueues() {
|
||||||
|
const {
|
||||||
|
ingestQueue,
|
||||||
|
documentIngestQueue,
|
||||||
|
conversationTitleQueue,
|
||||||
|
sessionCompactionQueue,
|
||||||
|
} = await import("../queues");
|
||||||
|
|
||||||
|
return [
|
||||||
|
ingestQueue,
|
||||||
|
documentIngestQueue,
|
||||||
|
conversationTitleQueue,
|
||||||
|
sessionCompactionQueue,
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find jobs by tags (metadata stored in job data)
|
||||||
|
* Since BullMQ doesn't have native tag support like Trigger.dev,
|
||||||
|
* we search through jobs and check if their data contains the required identifiers
|
||||||
|
*/
|
||||||
|
export async function getJobsByTags(
|
||||||
|
tags: string[],
|
||||||
|
taskIdentifier?: string,
|
||||||
|
): Promise<JobInfo[]> {
|
||||||
|
const queues = await getAllQueues();
|
||||||
|
const matchingJobs: JobInfo[] = [];
|
||||||
|
|
||||||
|
for (const queue of queues) {
|
||||||
|
// Skip if taskIdentifier is specified and doesn't match queue name
|
||||||
|
if (taskIdentifier && !queue.name.includes(taskIdentifier)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get all active and waiting jobs
|
||||||
|
const [active, waiting, delayed] = await Promise.all([
|
||||||
|
queue.getActive(),
|
||||||
|
queue.getWaiting(),
|
||||||
|
queue.getDelayed(),
|
||||||
|
]);
|
||||||
|
|
||||||
|
const allJobs = [...active, ...waiting, ...delayed];
|
||||||
|
|
||||||
|
for (const job of allJobs) {
|
||||||
|
// Check if job data contains all required tags
|
||||||
|
const jobData = job.data as any;
|
||||||
|
const matchesTags = tags.every(
|
||||||
|
(tag) =>
|
||||||
|
job.id?.includes(tag) ||
|
||||||
|
jobData.userId === tag ||
|
||||||
|
jobData.workspaceId === tag ||
|
||||||
|
jobData.queueId === tag,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (matchesTags) {
|
||||||
|
const state = await job.getState();
|
||||||
|
matchingJobs.push({
|
||||||
|
id: job.id!,
|
||||||
|
isCompleted: state === "completed" || state === "failed",
|
||||||
|
status: state,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return matchingJobs;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a specific job by ID across all queues
|
||||||
|
*/
|
||||||
|
export async function getJobById(jobId: string): Promise<JobInfo | null> {
|
||||||
|
const queues = await getAllQueues();
|
||||||
|
|
||||||
|
for (const queue of queues) {
|
||||||
|
try {
|
||||||
|
const job = await queue.getJob(jobId);
|
||||||
|
if (job) {
|
||||||
|
const state = await job.getState();
|
||||||
|
return {
|
||||||
|
id: job.id!,
|
||||||
|
isCompleted: state === "completed" || state === "failed",
|
||||||
|
status: state,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Job not in this queue, continue
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cancel a job by ID
|
||||||
|
*/
|
||||||
|
export async function cancelJobById(jobId: string): Promise<void> {
|
||||||
|
const queues = await getAllQueues();
|
||||||
|
|
||||||
|
for (const queue of queues) {
|
||||||
|
try {
|
||||||
|
const job = await queue.getJob(jobId);
|
||||||
|
if (job) {
|
||||||
|
const state = await job.getState();
|
||||||
|
// Only remove if not already completed
|
||||||
|
if (state !== "completed" && state !== "failed") {
|
||||||
|
await job.remove();
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Job not in this queue, continue
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
184
apps/webapp/app/bullmq/utils/worker-logger.ts
Normal file
184
apps/webapp/app/bullmq/utils/worker-logger.ts
Normal file
@ -0,0 +1,184 @@
|
|||||||
|
/**
|
||||||
|
* BullMQ Worker Logger
|
||||||
|
*
|
||||||
|
* Comprehensive logging utility for tracking worker status, queue metrics,
|
||||||
|
* and job lifecycle events
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { type Worker, type Queue } from "bullmq";
|
||||||
|
import { logger } from "~/services/logger.service";
|
||||||
|
|
||||||
|
interface WorkerMetrics {
|
||||||
|
name: string;
|
||||||
|
concurrency: number;
|
||||||
|
activeJobs: number;
|
||||||
|
waitingJobs: number;
|
||||||
|
delayedJobs: number;
|
||||||
|
failedJobs: number;
|
||||||
|
completedJobs: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setup comprehensive logging for a worker
|
||||||
|
*/
|
||||||
|
export function setupWorkerLogging(
|
||||||
|
worker: Worker,
|
||||||
|
queue: Queue,
|
||||||
|
workerName: string,
|
||||||
|
): void {
|
||||||
|
// Job picked up and started processing
|
||||||
|
worker.on("active", async (job) => {
|
||||||
|
const counts = await getQueueCounts(queue);
|
||||||
|
logger.log(
|
||||||
|
`[${workerName}] 🔄 Job started: ${job.id} | Queue: ${counts.waiting} waiting, ${counts.active} active, ${counts.delayed} delayed`,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Job completed successfully
|
||||||
|
worker.on("completed", async (job, result) => {
|
||||||
|
const counts = await getQueueCounts(queue);
|
||||||
|
const duration = job.finishedOn ? job.finishedOn - job.processedOn! : 0;
|
||||||
|
logger.log(
|
||||||
|
`[${workerName}] ✅ Job completed: ${job.id} (${duration}ms) | Queue: ${counts.waiting} waiting, ${counts.active} active`,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Job failed
|
||||||
|
worker.on("failed", async (job, error) => {
|
||||||
|
const counts = await getQueueCounts(queue);
|
||||||
|
const attempt = job?.attemptsMade || 0;
|
||||||
|
const maxAttempts = job?.opts?.attempts || 3;
|
||||||
|
logger.error(
|
||||||
|
`[${workerName}] ❌ Job failed: ${job?.id} (attempt ${attempt}/${maxAttempts}) | Error: ${error.message} | Queue: ${counts.waiting} waiting, ${counts.failed} failed`,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Job progress update (if job reports progress)
|
||||||
|
worker.on("progress", async (job, progress) => {
|
||||||
|
logger.log(`[${workerName}] 📊 Job progress: ${job.id} - ${progress}%`);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Worker stalled (job took too long)
|
||||||
|
worker.on("stalled", async (jobId) => {
|
||||||
|
logger.warn(`[${workerName}] ⚠️ Job stalled: ${jobId}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Worker error
|
||||||
|
worker.on("error", (error) => {
|
||||||
|
logger.error(`[${workerName}] 🔥 Worker error: ${error.message}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Worker closed
|
||||||
|
worker.on("closed", () => {
|
||||||
|
logger.log(`[${workerName}] 🛑 Worker closed`);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get queue counts for logging
|
||||||
|
*/
|
||||||
|
async function getQueueCounts(queue: Queue): Promise<{
|
||||||
|
waiting: number;
|
||||||
|
active: number;
|
||||||
|
delayed: number;
|
||||||
|
failed: number;
|
||||||
|
completed: number;
|
||||||
|
}> {
|
||||||
|
try {
|
||||||
|
const counts = await queue.getJobCounts(
|
||||||
|
"waiting",
|
||||||
|
"active",
|
||||||
|
"delayed",
|
||||||
|
"failed",
|
||||||
|
"completed",
|
||||||
|
);
|
||||||
|
return {
|
||||||
|
waiting: counts.waiting || 0,
|
||||||
|
active: counts.active || 0,
|
||||||
|
delayed: counts.delayed || 0,
|
||||||
|
failed: counts.failed || 0,
|
||||||
|
completed: counts.completed || 0,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
return { waiting: 0, active: 0, delayed: 0, failed: 0, completed: 0 };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get metrics for all workers
|
||||||
|
*/
|
||||||
|
export async function getAllWorkerMetrics(
|
||||||
|
workers: Array<{ worker: Worker; queue: Queue; name: string }>,
|
||||||
|
): Promise<WorkerMetrics[]> {
|
||||||
|
const metrics = await Promise.all(
|
||||||
|
workers.map(async ({ worker, queue, name }) => {
|
||||||
|
const counts = await getQueueCounts(queue);
|
||||||
|
return {
|
||||||
|
name,
|
||||||
|
concurrency: worker.opts.concurrency || 1,
|
||||||
|
activeJobs: counts.active,
|
||||||
|
waitingJobs: counts.waiting,
|
||||||
|
delayedJobs: counts.delayed,
|
||||||
|
failedJobs: counts.failed,
|
||||||
|
completedJobs: counts.completed,
|
||||||
|
};
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
return metrics;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log worker metrics summary
|
||||||
|
*/
|
||||||
|
export function logWorkerMetrics(metrics: WorkerMetrics[]): void {
|
||||||
|
logger.log("\n📊 BullMQ Worker Metrics:");
|
||||||
|
logger.log("─".repeat(80));
|
||||||
|
|
||||||
|
for (const metric of metrics) {
|
||||||
|
logger.log(
|
||||||
|
`[${metric.name.padEnd(25)}] Concurrency: ${metric.concurrency} | ` +
|
||||||
|
`Active: ${metric.activeJobs} | Waiting: ${metric.waitingJobs} | ` +
|
||||||
|
`Delayed: ${metric.delayedJobs} | Failed: ${metric.failedJobs} | ` +
|
||||||
|
`Completed: ${metric.completedJobs}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const totals = metrics.reduce(
|
||||||
|
(acc, m) => ({
|
||||||
|
active: acc.active + m.activeJobs,
|
||||||
|
waiting: acc.waiting + m.waitingJobs,
|
||||||
|
delayed: acc.delayed + m.delayedJobs,
|
||||||
|
failed: acc.failed + m.failedJobs,
|
||||||
|
completed: acc.completed + m.completedJobs,
|
||||||
|
}),
|
||||||
|
{ active: 0, waiting: 0, delayed: 0, failed: 0, completed: 0 },
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.log("─".repeat(80));
|
||||||
|
logger.log(
|
||||||
|
`[TOTAL] Active: ${totals.active} | Waiting: ${totals.waiting} | ` +
|
||||||
|
`Delayed: ${totals.delayed} | Failed: ${totals.failed} | ` +
|
||||||
|
`Completed: ${totals.completed}`,
|
||||||
|
);
|
||||||
|
logger.log("─".repeat(80) + "\n");
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start periodic metrics logging
|
||||||
|
*/
|
||||||
|
export function startPeriodicMetricsLogging(
|
||||||
|
workers: Array<{ worker: Worker; queue: Queue; name: string }>,
|
||||||
|
intervalMs: number = 60000, // Default: 1 minute
|
||||||
|
): NodeJS.Timeout {
|
||||||
|
const logMetrics = async () => {
|
||||||
|
const metrics = await getAllWorkerMetrics(workers);
|
||||||
|
logWorkerMetrics(metrics);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Log immediately on start
|
||||||
|
logMetrics();
|
||||||
|
|
||||||
|
// Then log periodically
|
||||||
|
return setInterval(logMetrics, intervalMs);
|
||||||
|
}
|
||||||
200
apps/webapp/app/bullmq/workers/index.ts
Normal file
200
apps/webapp/app/bullmq/workers/index.ts
Normal file
@ -0,0 +1,200 @@
|
|||||||
|
/**
|
||||||
|
* BullMQ Workers
|
||||||
|
*
|
||||||
|
* All worker definitions for processing background jobs with BullMQ
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Worker } from "bullmq";
|
||||||
|
import { getRedisConnection } from "../connection";
|
||||||
|
import {
|
||||||
|
processEpisodeIngestion,
|
||||||
|
type IngestEpisodePayload,
|
||||||
|
} from "~/jobs/ingest/ingest-episode.logic";
|
||||||
|
import {
|
||||||
|
processDocumentIngestion,
|
||||||
|
type IngestDocumentPayload,
|
||||||
|
} from "~/jobs/ingest/ingest-document.logic";
|
||||||
|
import {
|
||||||
|
processConversationTitleCreation,
|
||||||
|
type CreateConversationTitlePayload,
|
||||||
|
} from "~/jobs/conversation/create-title.logic";
|
||||||
|
import {
|
||||||
|
processSessionCompaction,
|
||||||
|
type SessionCompactionPayload,
|
||||||
|
} from "~/jobs/session/session-compaction.logic";
|
||||||
|
import {
|
||||||
|
processTopicAnalysis,
|
||||||
|
type TopicAnalysisPayload,
|
||||||
|
} from "~/jobs/bert/topic-analysis.logic";
|
||||||
|
|
||||||
|
import {
|
||||||
|
enqueueIngestEpisode,
|
||||||
|
enqueueSpaceAssignment,
|
||||||
|
enqueueSessionCompaction,
|
||||||
|
enqueueBertTopicAnalysis,
|
||||||
|
enqueueSpaceSummary,
|
||||||
|
} from "~/lib/queue-adapter.server";
|
||||||
|
import { logger } from "~/services/logger.service";
|
||||||
|
import {
|
||||||
|
processSpaceAssignment,
|
||||||
|
type SpaceAssignmentPayload,
|
||||||
|
} from "~/jobs/spaces/space-assignment.logic";
|
||||||
|
import {
|
||||||
|
processSpaceSummary,
|
||||||
|
type SpaceSummaryPayload,
|
||||||
|
} from "~/jobs/spaces/space-summary.logic";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Episode ingestion worker
|
||||||
|
* Processes individual episode ingestion jobs with global concurrency
|
||||||
|
*
|
||||||
|
* Note: BullMQ uses global concurrency limit (5 jobs max).
|
||||||
|
* Trigger.dev uses per-user concurrency via concurrencyKey.
|
||||||
|
* For most open-source deployments, global concurrency is sufficient.
|
||||||
|
*/
|
||||||
|
export const ingestWorker = new Worker(
|
||||||
|
"ingest-queue",
|
||||||
|
async (job) => {
|
||||||
|
const payload = job.data as IngestEpisodePayload;
|
||||||
|
|
||||||
|
return await processEpisodeIngestion(
|
||||||
|
payload,
|
||||||
|
// Callbacks to enqueue follow-up jobs
|
||||||
|
enqueueSpaceAssignment,
|
||||||
|
enqueueSessionCompaction,
|
||||||
|
enqueueBertTopicAnalysis,
|
||||||
|
);
|
||||||
|
},
|
||||||
|
{
|
||||||
|
connection: getRedisConnection(),
|
||||||
|
concurrency: 1, // Global limit: process up to 1 jobs in parallel
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Document ingestion worker
|
||||||
|
* Handles document-level ingestion with differential processing
|
||||||
|
*
|
||||||
|
* Note: Per-user concurrency is achieved by using userId as part of the jobId
|
||||||
|
* when adding jobs to the queue
|
||||||
|
*/
|
||||||
|
export const documentIngestWorker = new Worker(
|
||||||
|
"document-ingest-queue",
|
||||||
|
async (job) => {
|
||||||
|
const payload = job.data as IngestDocumentPayload;
|
||||||
|
return await processDocumentIngestion(
|
||||||
|
payload,
|
||||||
|
// Callback to enqueue episode ingestion for each chunk
|
||||||
|
enqueueIngestEpisode,
|
||||||
|
);
|
||||||
|
},
|
||||||
|
{
|
||||||
|
connection: getRedisConnection(),
|
||||||
|
concurrency: 3, // Process up to 3 documents in parallel
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Conversation title creation worker
|
||||||
|
*/
|
||||||
|
export const conversationTitleWorker = new Worker(
|
||||||
|
"conversation-title-queue",
|
||||||
|
async (job) => {
|
||||||
|
const payload = job.data as CreateConversationTitlePayload;
|
||||||
|
return await processConversationTitleCreation(payload);
|
||||||
|
},
|
||||||
|
{
|
||||||
|
connection: getRedisConnection(),
|
||||||
|
concurrency: 10, // Process up to 10 title creations in parallel
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session compaction worker
|
||||||
|
*/
|
||||||
|
export const sessionCompactionWorker = new Worker(
|
||||||
|
"session-compaction-queue",
|
||||||
|
async (job) => {
|
||||||
|
const payload = job.data as SessionCompactionPayload;
|
||||||
|
return await processSessionCompaction(payload);
|
||||||
|
},
|
||||||
|
{
|
||||||
|
connection: getRedisConnection(),
|
||||||
|
concurrency: 3, // Process up to 3 compactions in parallel
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* BERT topic analysis worker
|
||||||
|
* Handles CPU-intensive topic modeling
|
||||||
|
*/
|
||||||
|
export const bertTopicWorker = new Worker(
|
||||||
|
"bert-topic-queue",
|
||||||
|
async (job) => {
|
||||||
|
const payload = job.data as TopicAnalysisPayload;
|
||||||
|
return await processTopicAnalysis(
|
||||||
|
payload,
|
||||||
|
// Callback to enqueue space summary
|
||||||
|
enqueueSpaceSummary,
|
||||||
|
);
|
||||||
|
},
|
||||||
|
{
|
||||||
|
connection: getRedisConnection(),
|
||||||
|
concurrency: 2, // Process up to 2 analyses in parallel (CPU-intensive)
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Space assignment worker
|
||||||
|
* Handles assigning episodes to spaces based on semantic matching
|
||||||
|
*
|
||||||
|
* Note: Global concurrency of 1 ensures sequential processing.
|
||||||
|
* Trigger.dev uses per-user concurrency via concurrencyKey.
|
||||||
|
*/
|
||||||
|
export const spaceAssignmentWorker = new Worker(
|
||||||
|
"space-assignment-queue",
|
||||||
|
async (job) => {
|
||||||
|
const payload = job.data as SpaceAssignmentPayload;
|
||||||
|
return await processSpaceAssignment(
|
||||||
|
payload,
|
||||||
|
// Callback to enqueue space summary
|
||||||
|
enqueueSpaceSummary,
|
||||||
|
);
|
||||||
|
},
|
||||||
|
{
|
||||||
|
connection: getRedisConnection(),
|
||||||
|
concurrency: 1, // Global limit: process one job at a time
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Space summary worker
|
||||||
|
* Handles generating summaries for spaces
|
||||||
|
*/
|
||||||
|
export const spaceSummaryWorker = new Worker(
|
||||||
|
"space-summary-queue",
|
||||||
|
async (job) => {
|
||||||
|
const payload = job.data as SpaceSummaryPayload;
|
||||||
|
return await processSpaceSummary(payload);
|
||||||
|
},
|
||||||
|
{
|
||||||
|
connection: getRedisConnection(),
|
||||||
|
concurrency: 1, // Process one space summary at a time
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Graceful shutdown handler
|
||||||
|
*/
|
||||||
|
export async function closeAllWorkers(): Promise<void> {
|
||||||
|
await Promise.all([
|
||||||
|
ingestWorker.close(),
|
||||||
|
documentIngestWorker.close(),
|
||||||
|
conversationTitleWorker.close(),
|
||||||
|
sessionCompactionWorker.close(),
|
||||||
|
bertTopicWorker.close(),
|
||||||
|
spaceSummaryWorker.close(),
|
||||||
|
spaceAssignmentWorker.close(),
|
||||||
|
]);
|
||||||
|
logger.log("All BullMQ workers closed");
|
||||||
|
}
|
||||||
@ -28,7 +28,8 @@ export const useTokensColumns = (): Array<ColumnDef<PersonalAccessToken>> => {
|
|||||||
const [open, setOpen] = React.useState(false);
|
const [open, setOpen] = React.useState(false);
|
||||||
|
|
||||||
const onDelete = (id: string) => {
|
const onDelete = (id: string) => {
|
||||||
fetcher.submit({ id }, { method: "DELETE", action: "/home/api" });
|
fetcher.submit({ id }, { method: "DELETE", action: "/settings/api" });
|
||||||
|
setOpen(false);
|
||||||
};
|
};
|
||||||
|
|
||||||
return [
|
return [
|
||||||
|
|||||||
@ -0,0 +1,71 @@
|
|||||||
|
import { useState } from "react";
|
||||||
|
import { FileText, Plus } from "lucide-react";
|
||||||
|
import {
|
||||||
|
CommandDialog,
|
||||||
|
CommandGroup,
|
||||||
|
CommandInput,
|
||||||
|
CommandItem,
|
||||||
|
CommandList,
|
||||||
|
} from "../ui/command";
|
||||||
|
import { AddMemoryDialog } from "./memory-dialog.client";
|
||||||
|
import { AddDocumentDialog } from "./document-dialog";
|
||||||
|
|
||||||
|
interface AddMemoryCommandProps {
|
||||||
|
open: boolean;
|
||||||
|
onOpenChange: (open: boolean) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function AddMemoryCommand({
|
||||||
|
open,
|
||||||
|
onOpenChange,
|
||||||
|
}: AddMemoryCommandProps) {
|
||||||
|
const [showAddMemory, setShowAddMemory] = useState(false);
|
||||||
|
const [showAddDocument, setShowAddDocument] = useState(false);
|
||||||
|
|
||||||
|
const handleAddMemory = () => {
|
||||||
|
onOpenChange(false);
|
||||||
|
setShowAddMemory(true);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleAddDocument = () => {
|
||||||
|
onOpenChange(false);
|
||||||
|
setShowAddDocument(true);
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
{/* Main Command Dialog */}
|
||||||
|
<CommandDialog open={open} onOpenChange={onOpenChange}>
|
||||||
|
<CommandInput placeholder="Search" className="py-1" />
|
||||||
|
<CommandList>
|
||||||
|
<CommandGroup heading="Add to Memory">
|
||||||
|
<CommandItem
|
||||||
|
onSelect={handleAddMemory}
|
||||||
|
className="flex items-center gap-2 py-1"
|
||||||
|
>
|
||||||
|
<Plus className="mr-2 h-4 w-4" />
|
||||||
|
<span>Add Memory</span>
|
||||||
|
</CommandItem>
|
||||||
|
<CommandItem
|
||||||
|
onSelect={handleAddDocument}
|
||||||
|
className="flex items-center gap-2 py-1"
|
||||||
|
>
|
||||||
|
<FileText className="mr-2 h-4 w-4" />
|
||||||
|
<span>Add Document</span>
|
||||||
|
</CommandItem>
|
||||||
|
</CommandGroup>
|
||||||
|
</CommandList>
|
||||||
|
</CommandDialog>
|
||||||
|
|
||||||
|
{showAddMemory && (
|
||||||
|
<AddMemoryDialog open={showAddMemory} onOpenChange={setShowAddMemory} />
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Add Document Dialog */}
|
||||||
|
<AddDocumentDialog
|
||||||
|
open={showAddDocument}
|
||||||
|
onOpenChange={setShowAddDocument}
|
||||||
|
/>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
}
|
||||||
27
apps/webapp/app/components/command-bar/document-dialog.tsx
Normal file
27
apps/webapp/app/components/command-bar/document-dialog.tsx
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
import { Dialog, DialogContent, DialogHeader, DialogTitle } from "../ui/dialog";
|
||||||
|
|
||||||
|
interface AddDocumentDialogProps {
|
||||||
|
open: boolean;
|
||||||
|
onOpenChange: (open: boolean) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function AddDocumentDialog({
|
||||||
|
open,
|
||||||
|
onOpenChange,
|
||||||
|
}: AddDocumentDialogProps) {
|
||||||
|
return (
|
||||||
|
<Dialog open={open} onOpenChange={onOpenChange}>
|
||||||
|
<DialogContent className="sm:max-w-[600px]">
|
||||||
|
<DialogHeader>
|
||||||
|
<DialogTitle>Add Document</DialogTitle>
|
||||||
|
</DialogHeader>
|
||||||
|
{/* TODO: Add document content here */}
|
||||||
|
<div className="border-border rounded-md border p-4">
|
||||||
|
<p className="text-muted-foreground text-sm">
|
||||||
|
Document upload content goes here...
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</DialogContent>
|
||||||
|
</Dialog>
|
||||||
|
);
|
||||||
|
}
|
||||||
@ -0,0 +1,95 @@
|
|||||||
|
import { Dialog, DialogContent, DialogHeader, DialogTitle } from "../ui/dialog";
|
||||||
|
import { useEditor, EditorContent } from "@tiptap/react";
|
||||||
|
import {
|
||||||
|
extensionsForConversation,
|
||||||
|
getPlaceholder,
|
||||||
|
} from "../conversation/editor-extensions";
|
||||||
|
import { Button } from "../ui/button";
|
||||||
|
import { SpaceDropdown } from "../spaces/space-dropdown";
|
||||||
|
import React from "react";
|
||||||
|
import { useFetcher } from "@remix-run/react";
|
||||||
|
|
||||||
|
interface AddMemoryDialogProps {
|
||||||
|
open: boolean;
|
||||||
|
onOpenChange: (open: boolean) => void;
|
||||||
|
defaultSpaceId?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function AddMemoryDialog({
|
||||||
|
open,
|
||||||
|
onOpenChange,
|
||||||
|
defaultSpaceId,
|
||||||
|
}: AddMemoryDialogProps) {
|
||||||
|
const [spaceIds, setSpaceIds] = React.useState<string[]>(
|
||||||
|
defaultSpaceId ? [defaultSpaceId] : [],
|
||||||
|
);
|
||||||
|
const fetcher = useFetcher();
|
||||||
|
const editor = useEditor({
|
||||||
|
extensions: [
|
||||||
|
...extensionsForConversation,
|
||||||
|
getPlaceholder("Write your memory here..."),
|
||||||
|
],
|
||||||
|
editorProps: {
|
||||||
|
attributes: {
|
||||||
|
class:
|
||||||
|
"prose prose-sm focus:outline-none max-w-full min-h-[200px] p-4 py-0",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const handleAdd = async () => {
|
||||||
|
const content = editor?.getText();
|
||||||
|
if (!content?.trim()) return;
|
||||||
|
|
||||||
|
const payload = {
|
||||||
|
episodeBody: content,
|
||||||
|
referenceTime: new Date().toISOString(),
|
||||||
|
spaceIds: spaceIds,
|
||||||
|
source: "core",
|
||||||
|
};
|
||||||
|
|
||||||
|
fetcher.submit(payload, {
|
||||||
|
method: "POST",
|
||||||
|
action: "/api/v1/add",
|
||||||
|
encType: "application/json",
|
||||||
|
});
|
||||||
|
|
||||||
|
// Clear editor and close dialog
|
||||||
|
editor?.commands.clearContent();
|
||||||
|
setSpaceIds([]);
|
||||||
|
onOpenChange(false);
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Dialog open={open} onOpenChange={onOpenChange}>
|
||||||
|
<DialogContent className="pt-0 sm:max-w-[600px]">
|
||||||
|
<div className="overflow-hidden rounded-md">
|
||||||
|
<EditorContent editor={editor} />
|
||||||
|
</div>
|
||||||
|
<div className="flex justify-between gap-2 px-4 pb-4">
|
||||||
|
<div>
|
||||||
|
<SpaceDropdown
|
||||||
|
episodeIds={[]}
|
||||||
|
selectedSpaceIds={spaceIds}
|
||||||
|
onSpaceChange={(spaceIds) => {
|
||||||
|
setSpaceIds(spaceIds);
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
<div className="flex gap-2">
|
||||||
|
<Button variant="ghost" onClick={() => onOpenChange(false)}>
|
||||||
|
Cancel
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
variant="secondary"
|
||||||
|
onClick={handleAdd}
|
||||||
|
isLoading={fetcher.state !== "idle"}
|
||||||
|
>
|
||||||
|
Add
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</DialogContent>
|
||||||
|
</Dialog>
|
||||||
|
);
|
||||||
|
}
|
||||||
219
apps/webapp/app/components/common/styled-markdown.tsx
Normal file
219
apps/webapp/app/components/common/styled-markdown.tsx
Normal file
@ -0,0 +1,219 @@
|
|||||||
|
import ReactMarkdown, {type Components } from "react-markdown";
|
||||||
|
import { cn } from "~/lib/utils";
|
||||||
|
|
||||||
|
const markdownComponents: Components = {
|
||||||
|
h1: ({ className, ...props }) => (
|
||||||
|
<h1
|
||||||
|
className={cn("mt-2 mb-1 text-3xl font-bold tracking-tight", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
h2: ({ className, ...props }) => (
|
||||||
|
<h2
|
||||||
|
className={cn(
|
||||||
|
"mt-2 mb-1 text-2xl font-semibold tracking-tight",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
h3: ({ className, ...props }) => (
|
||||||
|
<h3
|
||||||
|
className={cn(
|
||||||
|
"mt-2 mb-1 text-xl font-semibold tracking-tight",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
h4: ({ className, ...props }) => (
|
||||||
|
<h4
|
||||||
|
className={cn(
|
||||||
|
"mt-1.5 mb-0.5 text-lg font-semibold tracking-tight",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
h5: ({ className, ...props }) => (
|
||||||
|
<h5
|
||||||
|
className={cn(
|
||||||
|
"mt-1.5 mb-0.5 text-base font-semibold tracking-tight",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
h6: ({ className, ...props }) => (
|
||||||
|
<h6
|
||||||
|
className={cn(
|
||||||
|
"mt-1.5 mb-0.5 text-sm font-semibold tracking-tight",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
p: ({ className, ...props }) => (
|
||||||
|
<p
|
||||||
|
className={cn(
|
||||||
|
"mb-1 leading-normal [&:not(:first-child)]:mt-1",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
ul: ({ className, ...props }) => (
|
||||||
|
<ul
|
||||||
|
className={cn(
|
||||||
|
"my-1 ml-5 flex list-disc flex-col space-y-0 marker:text-gray-700 dark:marker:text-gray-400",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
ol: ({ className, ...props }) => (
|
||||||
|
<ol
|
||||||
|
className={cn(
|
||||||
|
"my-1 ml-5 list-decimal space-y-0 marker:text-gray-700 dark:marker:text-gray-400",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
li: ({ className, ...props }) => (
|
||||||
|
<li className={cn("py-0.5 pl-1 leading-normal", className)} {...props} />
|
||||||
|
),
|
||||||
|
blockquote: ({ className, ...props }) => (
|
||||||
|
<blockquote
|
||||||
|
className={cn(
|
||||||
|
"mt-1 mb-1 border-l-4 border-gray-300 pl-4 text-gray-700 italic dark:border-gray-600 dark:text-gray-300",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
code: ({ className, inline, ...props }: any) =>
|
||||||
|
inline ? (
|
||||||
|
<code
|
||||||
|
className={cn(
|
||||||
|
"rounded bg-gray-100 px-1.5 py-0.5 font-mono text-sm text-gray-800 dark:bg-gray-800 dark:text-gray-200",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
) : (
|
||||||
|
<code
|
||||||
|
className={cn(
|
||||||
|
"block rounded-lg bg-gray-100 p-4 font-mono text-sm text-gray-800 dark:bg-gray-800 dark:text-gray-200",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
pre: ({ className, ...props }) => (
|
||||||
|
<pre
|
||||||
|
className={cn(
|
||||||
|
"mb-1 overflow-x-auto rounded-lg bg-gray-100 p-4 dark:bg-gray-800",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
a: ({ className, ...props }) => (
|
||||||
|
<a
|
||||||
|
className={cn(
|
||||||
|
"font-medium text-blue-600 underline underline-offset-4 hover:text-blue-800 dark:text-blue-400 dark:hover:text-blue-300",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
hr: ({ className, ...props }) => (
|
||||||
|
<hr
|
||||||
|
className={cn(
|
||||||
|
"my-2 border-t border-gray-300 dark:border-gray-600",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
table: ({ className, ...props }) => (
|
||||||
|
<div className="mb-1 w-full overflow-auto">
|
||||||
|
<table
|
||||||
|
className={cn(
|
||||||
|
"w-full border-collapse border border-gray-300 dark:border-gray-600",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
),
|
||||||
|
thead: ({ className, ...props }) => (
|
||||||
|
<thead
|
||||||
|
className={cn("bg-gray-100 dark:bg-gray-800", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
tbody: ({ className, ...props }) => (
|
||||||
|
<tbody className={cn("", className)} {...props} />
|
||||||
|
),
|
||||||
|
tr: ({ className, ...props }) => (
|
||||||
|
<tr
|
||||||
|
className={cn("border-b border-gray-300 dark:border-gray-600", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
th: ({ className, ...props }) => (
|
||||||
|
<th
|
||||||
|
className={cn(
|
||||||
|
"border border-gray-300 px-4 py-2 text-left font-semibold dark:border-gray-600",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
td: ({ className, ...props }) => (
|
||||||
|
<td
|
||||||
|
className={cn(
|
||||||
|
"border border-gray-300 px-4 py-2 dark:border-gray-600",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
strong: ({ className, ...props }) => (
|
||||||
|
<strong className={cn("font-bold", className)} {...props} />
|
||||||
|
),
|
||||||
|
em: ({ className, ...props }) => (
|
||||||
|
<em className={cn("italic", className)} {...props} />
|
||||||
|
),
|
||||||
|
};
|
||||||
|
|
||||||
|
interface StyledMarkdownProps {
|
||||||
|
children: string;
|
||||||
|
className?: string;
|
||||||
|
components?: Components;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function StyledMarkdown({
|
||||||
|
children,
|
||||||
|
className,
|
||||||
|
components,
|
||||||
|
}: StyledMarkdownProps) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
className={cn(
|
||||||
|
"max-w-none",
|
||||||
|
"[&_ul_ul]:my-0.5 [&_ul_ul]:ml-4",
|
||||||
|
"[&_ol_ol]:my-0.5 [&_ol_ol]:ml-4",
|
||||||
|
"[&_ul_ol]:my-0.5 [&_ul_ol]:ml-4",
|
||||||
|
"[&_ol_ul]:my-0.5 [&_ol_ul]:ml-4",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
<ReactMarkdown components={{ ...markdownComponents, ...components }}>
|
||||||
|
{children}
|
||||||
|
</ReactMarkdown>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
@ -1,38 +1,42 @@
|
|||||||
import { EditorContent, useEditor } from "@tiptap/react";
|
import { EditorContent, useEditor } from "@tiptap/react";
|
||||||
|
|
||||||
import { useEffect } from "react";
|
import { useEffect, memo } from "react";
|
||||||
import { UserTypeEnum } from "@core/types";
|
|
||||||
import { type ConversationHistory } from "@core/database";
|
|
||||||
import { cn } from "~/lib/utils";
|
import { cn } from "~/lib/utils";
|
||||||
import { extensionsForConversation } from "./editor-extensions";
|
import { extensionsForConversation } from "./editor-extensions";
|
||||||
import { skillExtension } from "../editor/skill-extension";
|
import { skillExtension } from "../editor/skill-extension";
|
||||||
|
import { type UIMessage } from "ai";
|
||||||
|
|
||||||
interface AIConversationItemProps {
|
interface AIConversationItemProps {
|
||||||
conversationHistory: ConversationHistory;
|
message: UIMessage;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const ConversationItem = ({
|
function getMessage(message: string) {
|
||||||
conversationHistory,
|
let finalMessage = message.replace("<final_response>", "");
|
||||||
}: AIConversationItemProps) => {
|
finalMessage = finalMessage.replace("</final_response>", "");
|
||||||
const isUser =
|
finalMessage = finalMessage.replace("<question_response>", "");
|
||||||
conversationHistory.userType === UserTypeEnum.User ||
|
finalMessage = finalMessage.replace("</question_response>", "");
|
||||||
conversationHistory.userType === UserTypeEnum.System;
|
|
||||||
|
|
||||||
const id = `a${conversationHistory.id.replace(/-/g, "")}`;
|
return finalMessage;
|
||||||
|
}
|
||||||
|
|
||||||
|
const ConversationItemComponent = ({ message }: AIConversationItemProps) => {
|
||||||
|
const isUser = message.role === "user" || false;
|
||||||
|
const textPart = message.parts.find((part) => part.type === "text");
|
||||||
|
|
||||||
const editor = useEditor({
|
const editor = useEditor({
|
||||||
extensions: [...extensionsForConversation, skillExtension],
|
extensions: [...extensionsForConversation, skillExtension],
|
||||||
editable: false,
|
editable: false,
|
||||||
content: conversationHistory.message,
|
content: textPart ? getMessage(textPart.text) : "",
|
||||||
});
|
});
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
editor?.commands.setContent(conversationHistory.message);
|
if (textPart) {
|
||||||
|
editor?.commands.setContent(getMessage(textPart.text));
|
||||||
|
}
|
||||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
}, [id, conversationHistory.message]);
|
}, [message]);
|
||||||
|
|
||||||
if (!conversationHistory.message) {
|
if (!message) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -49,3 +53,12 @@ export const ConversationItem = ({
|
|||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Memoize to prevent unnecessary re-renders
|
||||||
|
export const ConversationItem = memo(
|
||||||
|
ConversationItemComponent,
|
||||||
|
(prevProps, nextProps) => {
|
||||||
|
// Only re-render if the conversation history ID or message changed
|
||||||
|
return prevProps.message === nextProps.message;
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|||||||
@ -57,9 +57,7 @@ export const ConversationList = ({
|
|||||||
limit: "5", // Increased for better density
|
limit: "5", // Increased for better density
|
||||||
});
|
});
|
||||||
|
|
||||||
fetcher.load(`/api/v1/conversations?${searchParams}`, {
|
fetcher.load(`/api/v1/conversations?${searchParams}`);
|
||||||
flushSync: true,
|
|
||||||
});
|
|
||||||
},
|
},
|
||||||
[isLoading, fetcher],
|
[isLoading, fetcher],
|
||||||
);
|
);
|
||||||
|
|||||||
@ -5,28 +5,30 @@ import { Paragraph } from "@tiptap/extension-paragraph";
|
|||||||
import { Text } from "@tiptap/extension-text";
|
import { Text } from "@tiptap/extension-text";
|
||||||
import { type Editor } from "@tiptap/react";
|
import { type Editor } from "@tiptap/react";
|
||||||
import { EditorContent, Placeholder, EditorRoot } from "novel";
|
import { EditorContent, Placeholder, EditorRoot } from "novel";
|
||||||
import { useCallback, useState } from "react";
|
import { useCallback, useState, useEffect } from "react";
|
||||||
import { cn } from "~/lib/utils";
|
import { cn } from "~/lib/utils";
|
||||||
import { Button } from "../ui";
|
import { Button } from "../ui";
|
||||||
import { LoaderCircle } from "lucide-react";
|
import { LoaderCircle } from "lucide-react";
|
||||||
import { Form, useSubmit } from "@remix-run/react";
|
import { Form, useSubmit, useActionData } from "@remix-run/react";
|
||||||
|
|
||||||
interface ConversationTextareaProps {
|
interface ConversationTextareaProps {
|
||||||
defaultValue?: string;
|
defaultValue?: string;
|
||||||
conversationId: string;
|
|
||||||
placeholder?: string;
|
placeholder?: string;
|
||||||
isLoading?: boolean;
|
isLoading?: boolean;
|
||||||
className?: string;
|
className?: string;
|
||||||
onChange?: (text: string) => void;
|
onChange?: (text: string) => void;
|
||||||
disabled?: boolean;
|
disabled?: boolean;
|
||||||
|
onConversationCreated?: (message: string) => void;
|
||||||
|
stop?: () => void;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function ConversationTextarea({
|
export function ConversationTextarea({
|
||||||
defaultValue,
|
defaultValue,
|
||||||
isLoading = false,
|
isLoading = false,
|
||||||
placeholder,
|
placeholder,
|
||||||
conversationId,
|
|
||||||
onChange,
|
onChange,
|
||||||
|
onConversationCreated,
|
||||||
|
stop,
|
||||||
}: ConversationTextareaProps) {
|
}: ConversationTextareaProps) {
|
||||||
const [text, setText] = useState(defaultValue ?? "");
|
const [text, setText] = useState(defaultValue ?? "");
|
||||||
const [editor, setEditor] = useState<Editor>();
|
const [editor, setEditor] = useState<Editor>();
|
||||||
@ -42,131 +44,99 @@ export function ConversationTextarea({
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const data = isLoading ? {} : { message: text, conversationId };
|
onConversationCreated && onConversationCreated(text);
|
||||||
|
|
||||||
submit(data as any, {
|
|
||||||
action: isLoading
|
|
||||||
? `/home/conversation/${conversationId}`
|
|
||||||
: "/home/conversation",
|
|
||||||
method: "post",
|
|
||||||
});
|
|
||||||
|
|
||||||
editor?.commands.clearContent(true);
|
editor?.commands.clearContent(true);
|
||||||
setText("");
|
setText("");
|
||||||
|
|
||||||
editor.commands.clearContent(true);
|
|
||||||
setText("");
|
|
||||||
}, [editor, text]);
|
}, [editor, text]);
|
||||||
|
|
||||||
// Send message to API
|
|
||||||
const submitForm = useCallback(
|
|
||||||
async (e: React.FormEvent<HTMLFormElement>) => {
|
|
||||||
const data = isLoading
|
|
||||||
? {}
|
|
||||||
: { message: text, title: text, conversationId };
|
|
||||||
|
|
||||||
submit(data as any, {
|
|
||||||
action: isLoading
|
|
||||||
? `/home/conversation/${conversationId}`
|
|
||||||
: "/home/conversation",
|
|
||||||
method: "post",
|
|
||||||
});
|
|
||||||
|
|
||||||
editor?.commands.clearContent(true);
|
|
||||||
setText("");
|
|
||||||
e.preventDefault();
|
|
||||||
},
|
|
||||||
[text, conversationId],
|
|
||||||
);
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Form
|
<div className="bg-background-3 rounded-lg border-1 border-gray-300 py-2">
|
||||||
action="/home/conversation"
|
<EditorRoot>
|
||||||
method="post"
|
<EditorContent
|
||||||
onSubmit={(e) => submitForm(e)}
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
className="pt-2"
|
initialContent={defaultValue as any}
|
||||||
>
|
extensions={[
|
||||||
<div className="bg-background-3 rounded-lg border-1 border-gray-300 py-2">
|
Document,
|
||||||
<EditorRoot>
|
Paragraph,
|
||||||
<EditorContent
|
Text,
|
||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
HardBreak.configure({
|
||||||
initialContent={defaultValue as any}
|
keepMarks: true,
|
||||||
extensions={[
|
}),
|
||||||
Document,
|
|
||||||
Paragraph,
|
|
||||||
Text,
|
|
||||||
HardBreak.configure({
|
|
||||||
keepMarks: true,
|
|
||||||
}),
|
|
||||||
|
|
||||||
Placeholder.configure({
|
Placeholder.configure({
|
||||||
placeholder: () => placeholder ?? "Ask sol...",
|
placeholder: () => placeholder ?? "Ask sol...",
|
||||||
includeChildren: true,
|
includeChildren: true,
|
||||||
}),
|
}),
|
||||||
History,
|
History,
|
||||||
]}
|
]}
|
||||||
onCreate={async ({ editor }) => {
|
onCreate={async ({ editor }) => {
|
||||||
setEditor(editor);
|
setEditor(editor);
|
||||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||||
editor.commands.focus("end");
|
editor.commands.focus("end");
|
||||||
}}
|
}}
|
||||||
onUpdate={({ editor }) => {
|
onUpdate={({ editor }) => {
|
||||||
onUpdate(editor);
|
onUpdate(editor);
|
||||||
}}
|
}}
|
||||||
shouldRerenderOnTransaction={false}
|
shouldRerenderOnTransaction={false}
|
||||||
editorProps={{
|
editorProps={{
|
||||||
attributes: {
|
attributes: {
|
||||||
class: `prose prose-lg dark:prose-invert prose-headings:font-title font-default focus:outline-none max-w-full`,
|
class: `prose prose-lg dark:prose-invert prose-headings:font-title font-default focus:outline-none max-w-full`,
|
||||||
},
|
},
|
||||||
handleKeyDown(view, event) {
|
handleKeyDown(view, event) {
|
||||||
if (event.key === "Enter" && !event.shiftKey) {
|
if (event.key === "Enter" && !event.shiftKey) {
|
||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
const target = event.target as any;
|
const target = event.target as any;
|
||||||
if (target.innerHTML.includes("suggestion")) {
|
if (target.innerHTML.includes("suggestion")) {
|
||||||
return false;
|
return false;
|
||||||
}
|
|
||||||
event.preventDefault();
|
|
||||||
if (text) {
|
|
||||||
handleSend();
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
}
|
||||||
|
event.preventDefault();
|
||||||
|
if (text) {
|
||||||
|
handleSend();
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
if (event.key === "Enter" && event.shiftKey) {
|
if (event.key === "Enter" && event.shiftKey) {
|
||||||
view.dispatch(
|
view.dispatch(
|
||||||
view.state.tr.replaceSelectionWith(
|
view.state.tr.replaceSelectionWith(
|
||||||
view.state.schema.nodes.hardBreak.create(),
|
view.state.schema.nodes.hardBreak.create(),
|
||||||
),
|
),
|
||||||
);
|
);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
}}
|
}}
|
||||||
immediatelyRender={false}
|
immediatelyRender={false}
|
||||||
className={cn(
|
className={cn(
|
||||||
"editor-container text-md max-h-[400px] min-h-[40px] w-full min-w-full overflow-auto rounded-lg px-3",
|
"editor-container text-md max-h-[400px] min-h-[40px] w-full min-w-full overflow-auto rounded-lg px-3",
|
||||||
)}
|
)}
|
||||||
/>
|
/>
|
||||||
</EditorRoot>
|
</EditorRoot>
|
||||||
<div className="mb-1 flex justify-end px-3">
|
<div className="mb-1 flex justify-end px-3">
|
||||||
<Button
|
<Button
|
||||||
variant="default"
|
variant="default"
|
||||||
className="gap-1 shadow-none transition-all duration-500 ease-in-out"
|
className="gap-1 shadow-none transition-all duration-500 ease-in-out"
|
||||||
type="submit"
|
onClick={() => {
|
||||||
size="lg"
|
if (!isLoading) {
|
||||||
>
|
handleSend();
|
||||||
{isLoading ? (
|
} else {
|
||||||
<>
|
stop && stop();
|
||||||
<LoaderCircle size={18} className="mr-1 animate-spin" />
|
}
|
||||||
Stop
|
}}
|
||||||
</>
|
size="lg"
|
||||||
) : (
|
>
|
||||||
<>Chat</>
|
{isLoading ? (
|
||||||
)}
|
<>
|
||||||
</Button>
|
<LoaderCircle size={18} className="mr-1 animate-spin" />
|
||||||
</div>
|
Stop
|
||||||
|
</>
|
||||||
|
) : (
|
||||||
|
<>Chat</>
|
||||||
|
)}
|
||||||
|
</Button>
|
||||||
</div>
|
</div>
|
||||||
</Form>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@ -9,6 +9,7 @@ import TableHeader from "@tiptap/extension-table-header";
|
|||||||
import TableRow from "@tiptap/extension-table-row";
|
import TableRow from "@tiptap/extension-table-row";
|
||||||
import { all, createLowlight } from "lowlight";
|
import { all, createLowlight } from "lowlight";
|
||||||
import { mergeAttributes, type Extension } from "@tiptap/react";
|
import { mergeAttributes, type Extension } from "@tiptap/react";
|
||||||
|
import { Markdown } from "tiptap-markdown";
|
||||||
|
|
||||||
// create a lowlight instance with all languages loaded
|
// create a lowlight instance with all languages loaded
|
||||||
export const lowlight = createLowlight(all);
|
export const lowlight = createLowlight(all);
|
||||||
@ -136,4 +137,5 @@ export const extensionsForConversation = [
|
|||||||
CodeBlockLowlight.configure({
|
CodeBlockLowlight.configure({
|
||||||
lowlight,
|
lowlight,
|
||||||
}),
|
}),
|
||||||
|
Markdown,
|
||||||
];
|
];
|
||||||
|
|||||||
@ -17,7 +17,7 @@ export const StreamingConversation = ({
|
|||||||
afterStreaming,
|
afterStreaming,
|
||||||
apiURL,
|
apiURL,
|
||||||
}: StreamingConversationProps) => {
|
}: StreamingConversationProps) => {
|
||||||
const { message, isEnd } = useTriggerStream(runId, token, apiURL);
|
const { message } = useTriggerStream(runId, token, apiURL, afterStreaming);
|
||||||
const [loadingText, setLoadingText] = React.useState("Thinking...");
|
const [loadingText, setLoadingText] = React.useState("Thinking...");
|
||||||
|
|
||||||
const loadingMessages = [
|
const loadingMessages = [
|
||||||
@ -48,13 +48,6 @@ export const StreamingConversation = ({
|
|||||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
}, [message]);
|
}, [message]);
|
||||||
|
|
||||||
React.useEffect(() => {
|
|
||||||
if (isEnd) {
|
|
||||||
afterStreaming();
|
|
||||||
}
|
|
||||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
||||||
}, [isEnd]);
|
|
||||||
|
|
||||||
React.useEffect(() => {
|
React.useEffect(() => {
|
||||||
let currentIndex = 0;
|
let currentIndex = 0;
|
||||||
let delay = 5000; // Start with 2 seconds for more thinking time
|
let delay = 5000; // Start with 2 seconds for more thinking time
|
||||||
|
|||||||
@ -1,5 +1,5 @@
|
|||||||
import { useRealtimeRunWithStreams } from "@trigger.dev/react-hooks";
|
import React, { useEffect, useState } from "react";
|
||||||
import React from "react";
|
import { EventSource, type ErrorEvent } from "eventsource";
|
||||||
|
|
||||||
const getTriggerAPIURL = (apiURL?: string) => {
|
const getTriggerAPIURL = (apiURL?: string) => {
|
||||||
return (
|
return (
|
||||||
@ -12,102 +12,53 @@ export const useTriggerStream = (
|
|||||||
runId: string,
|
runId: string,
|
||||||
token: string,
|
token: string,
|
||||||
apiURL?: string,
|
apiURL?: string,
|
||||||
|
afterStreaming?: (finalMessage: string) => void,
|
||||||
) => {
|
) => {
|
||||||
// Need to fix this later
|
// Need to fix this later
|
||||||
const baseURL = React.useMemo(() => getTriggerAPIURL(apiURL), [apiURL]);
|
const baseURL = React.useMemo(() => getTriggerAPIURL(apiURL), [apiURL]);
|
||||||
|
const [error, setError] = useState<ErrorEvent | null>(null);
|
||||||
|
const [message, setMessage] = useState("");
|
||||||
|
|
||||||
const { error, streams, run } = useRealtimeRunWithStreams(runId, {
|
useEffect(() => {
|
||||||
accessToken: token,
|
startStreaming();
|
||||||
baseURL, // Optional if you are using a self-hosted Trigger.dev instance
|
}, []);
|
||||||
});
|
|
||||||
|
|
||||||
const isEnd = React.useMemo(() => {
|
const startStreaming = () => {
|
||||||
if (error) {
|
const eventSource = new EventSource(
|
||||||
return true;
|
`${baseURL}/realtime/v1/streams/${runId}/messages`,
|
||||||
}
|
{
|
||||||
|
fetch: (input, init) =>
|
||||||
|
fetch(input, {
|
||||||
|
...init,
|
||||||
|
headers: {
|
||||||
|
...init.headers,
|
||||||
|
Authorization: `Bearer ${token}`,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
if (
|
eventSource.onmessage = (event) => {
|
||||||
run &&
|
try {
|
||||||
[
|
const eventData = JSON.parse(event.data);
|
||||||
"COMPLETED",
|
|
||||||
"CANCELED",
|
|
||||||
"FAILED",
|
|
||||||
"CRASHED",
|
|
||||||
"INTERRUPTED",
|
|
||||||
"SYSTEM_FAILURE",
|
|
||||||
"EXPIRED",
|
|
||||||
"TIMED_OUT",
|
|
||||||
].includes(run?.status)
|
|
||||||
) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
const hasStreamEnd =
|
if (eventData.type.includes("MESSAGE_")) {
|
||||||
streams.messages &&
|
setMessage((prevMessage) => prevMessage + eventData.message);
|
||||||
streams.messages.filter((item) => {
|
}
|
||||||
// Check if the item has a type that includes 'MESSAGE_' and is not empty
|
} catch (e) {
|
||||||
return item.type?.includes("STREAM_END");
|
console.error("Failed to parse message:", e);
|
||||||
});
|
}
|
||||||
|
};
|
||||||
|
|
||||||
if (hasStreamEnd && hasStreamEnd.length > 0) {
|
eventSource.onerror = (err) => {
|
||||||
return true;
|
console.error("EventSource failed:", err);
|
||||||
}
|
setError(err);
|
||||||
|
eventSource.close();
|
||||||
|
if (afterStreaming) {
|
||||||
|
afterStreaming(message);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
return false;
|
return { error, message, actionMessages: [] };
|
||||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
||||||
}, [run?.status, error, streams.messages?.length]);
|
|
||||||
|
|
||||||
const message = React.useMemo(() => {
|
|
||||||
if (!streams?.messages) {
|
|
||||||
return "";
|
|
||||||
}
|
|
||||||
|
|
||||||
// Filter and combine all message chunks
|
|
||||||
return streams.messages
|
|
||||||
.filter((item) => {
|
|
||||||
// Check if the item has a type that includes 'MESSAGE_' and is not empty
|
|
||||||
return item.type?.includes("MESSAGE_");
|
|
||||||
})
|
|
||||||
.map((item) => item.message)
|
|
||||||
.join("");
|
|
||||||
|
|
||||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
||||||
}, [streams.messages?.length]);
|
|
||||||
|
|
||||||
// const actionMessages = React.useMemo(() => {
|
|
||||||
// if (!streams?.messages) {
|
|
||||||
// return {};
|
|
||||||
// }
|
|
||||||
|
|
||||||
// // eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
||||||
// const messages: Record<string, { isStreaming: boolean; content: any[] }> =
|
|
||||||
// {};
|
|
||||||
|
|
||||||
// streams.messages.forEach((item) => {
|
|
||||||
// if (item.type?.includes("SKILL_")) {
|
|
||||||
// try {
|
|
||||||
// const parsed = JSON.parse(item.message);
|
|
||||||
// const skillId = parsed.skillId;
|
|
||||||
|
|
||||||
// if (!messages[skillId]) {
|
|
||||||
// messages[skillId] = { isStreaming: true, content: [] };
|
|
||||||
// }
|
|
||||||
|
|
||||||
// if (item.type === "SKILL_END") {
|
|
||||||
// messages[skillId].isStreaming = false;
|
|
||||||
// }
|
|
||||||
|
|
||||||
// messages[skillId].content.push(parsed);
|
|
||||||
// } catch (e) {
|
|
||||||
// console.error("Failed to parse message:", e);
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// });
|
|
||||||
|
|
||||||
// return messages;
|
|
||||||
|
|
||||||
// // eslint-disable-next-line react-hooks/exhaustive-deps
|
|
||||||
// }, [streams.messages?.length]);
|
|
||||||
|
|
||||||
return { isEnd, message, actionMessages: [] };
|
|
||||||
};
|
};
|
||||||
|
|||||||
@ -2,25 +2,20 @@ import { NodeViewWrapper } from "@tiptap/react";
|
|||||||
|
|
||||||
import React from "react";
|
import React from "react";
|
||||||
|
|
||||||
import { getIcon as iconUtil, type IconType } from "../../icon-utils";
|
import StaticLogo from "~/components/logo/logo";
|
||||||
|
|
||||||
import { ChevronDown, ChevronRight } from "lucide-react";
|
|
||||||
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
export const SkillComponent = (props: any) => {
|
export const SkillComponent = (props: any) => {
|
||||||
const id = props.node.attrs.id;
|
const id = props.node.attrs.id;
|
||||||
const name = props.node.attrs.name;
|
const name = props.node.attrs.name;
|
||||||
const agent = props.node.attrs.agent;
|
const agent = props.node.attrs.agent;
|
||||||
const [open, setOpen] = React.useState(false);
|
|
||||||
|
|
||||||
if (id === "undefined" || id === undefined || !name) {
|
if (id === "undefined" || id === undefined || !name) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
const getIcon = () => {
|
const getIcon = () => {
|
||||||
const Icon = iconUtil(agent as IconType);
|
return <StaticLogo size={18} className="rounded-sm" />;
|
||||||
|
|
||||||
return <Icon size={18} className="rounded-sm" />;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const snakeToTitleCase = (input: string): string => {
|
const snakeToTitleCase = (input: string): string => {
|
||||||
@ -46,7 +41,7 @@ export const SkillComponent = (props: any) => {
|
|||||||
<>
|
<>
|
||||||
<div className="bg-grayAlpha-100 text-sm-md mt-0.5 flex w-fit items-center gap-2 rounded p-2">
|
<div className="bg-grayAlpha-100 text-sm-md mt-0.5 flex w-fit items-center gap-2 rounded p-2">
|
||||||
{getIcon()}
|
{getIcon()}
|
||||||
<span className="font-mono text-sm">{snakeToTitleCase(name)}</span>
|
<span className="font-mono text-sm">{snakeToTitleCase(agent)}</span>
|
||||||
</div>
|
</div>
|
||||||
</>
|
</>
|
||||||
);
|
);
|
||||||
|
|||||||
@ -71,21 +71,26 @@ export const GraphClusteringVisualization = forwardRef<
|
|||||||
// Search filter
|
// Search filter
|
||||||
if (searchQuery.trim()) {
|
if (searchQuery.trim()) {
|
||||||
// Helper functions for filtering
|
// Helper functions for filtering
|
||||||
const isStatementNode = (node: any) => {
|
const isEpisodeNode = (node: any) => {
|
||||||
return (
|
return (
|
||||||
node.attributes?.fact ||
|
node.attributes?.content ||
|
||||||
(node.labels && node.labels.includes("Statement"))
|
node.attributes?.episodeUuid ||
|
||||||
|
(node.labels && node.labels.includes("Episode"))
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
const query = searchQuery.toLowerCase();
|
const query = searchQuery.toLowerCase();
|
||||||
filtered = filtered.filter((triplet) => {
|
filtered = filtered.filter((triplet) => {
|
||||||
const sourceMatches =
|
const sourceMatches =
|
||||||
isStatementNode(triplet.sourceNode) &&
|
isEpisodeNode(triplet.sourceNode) &&
|
||||||
triplet.sourceNode.attributes?.fact?.toLowerCase().includes(query);
|
triplet.sourceNode.attributes?.content
|
||||||
|
?.toLowerCase()
|
||||||
|
.includes(query);
|
||||||
const targetMatches =
|
const targetMatches =
|
||||||
isStatementNode(triplet.targetNode) &&
|
isEpisodeNode(triplet.targetNode) &&
|
||||||
triplet.targetNode.attributes?.fact?.toLowerCase().includes(query);
|
triplet.targetNode.attributes?.content
|
||||||
|
?.toLowerCase()
|
||||||
|
.includes(query);
|
||||||
|
|
||||||
return sourceMatches || targetMatches;
|
return sourceMatches || targetMatches;
|
||||||
});
|
});
|
||||||
|
|||||||
@ -192,13 +192,13 @@ export const GraphClustering = forwardRef<
|
|||||||
|
|
||||||
const nodeData = nodeDataMap.get(node.id) || node;
|
const nodeData = nodeDataMap.get(node.id) || node;
|
||||||
|
|
||||||
// Check if this is a Statement node
|
// Check if this is an Episode node
|
||||||
const isStatementNode =
|
const isEpisodeNode =
|
||||||
nodeData.attributes.nodeType === "Statement" ||
|
nodeData.attributes.nodeType === "Episode" ||
|
||||||
(nodeData.labels && nodeData.labels.includes("Statement"));
|
(nodeData.labels && nodeData.labels.includes("Episode"));
|
||||||
|
|
||||||
if (isStatementNode) {
|
if (isEpisodeNode) {
|
||||||
// Statement nodes with cluster IDs use cluster colors
|
// Episode nodes with cluster IDs use cluster colors
|
||||||
if (
|
if (
|
||||||
enableClusterColors &&
|
enableClusterColors &&
|
||||||
nodeData.clusterId &&
|
nodeData.clusterId &&
|
||||||
@ -207,7 +207,7 @@ export const GraphClustering = forwardRef<
|
|||||||
return clusterColorMap.get(nodeData.clusterId)!;
|
return clusterColorMap.get(nodeData.clusterId)!;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Unclustered statement nodes use a specific light color
|
// Unclustered episode nodes use a specific light color
|
||||||
return themeMode === "dark" ? "#2b9684" : "#54935b"; // Teal/Green from palette
|
return themeMode === "dark" ? "#2b9684" : "#54935b"; // Teal/Green from palette
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -229,10 +229,10 @@ export const GraphClustering = forwardRef<
|
|||||||
triplets.forEach((triplet) => {
|
triplets.forEach((triplet) => {
|
||||||
if (!nodeMap.has(triplet.source.id)) {
|
if (!nodeMap.has(triplet.source.id)) {
|
||||||
const nodeColor = getNodeColor(triplet.source);
|
const nodeColor = getNodeColor(triplet.source);
|
||||||
const isStatementNode =
|
const isEpisodeNode =
|
||||||
triplet.source.attributes?.nodeType === "Statement" ||
|
triplet.source.attributes?.nodeType === "Episode" ||
|
||||||
(triplet.source.labels &&
|
(triplet.source.labels &&
|
||||||
triplet.source.labels.includes("Statement"));
|
triplet.source.labels.includes("Episode"));
|
||||||
|
|
||||||
nodeMap.set(triplet.source.id, {
|
nodeMap.set(triplet.source.id, {
|
||||||
id: triplet.source.id,
|
id: triplet.source.id,
|
||||||
@ -240,23 +240,23 @@ export const GraphClustering = forwardRef<
|
|||||||
? triplet.source.value.split(/\s+/).slice(0, 4).join(" ") +
|
? triplet.source.value.split(/\s+/).slice(0, 4).join(" ") +
|
||||||
(triplet.source.value.split(/\s+/).length > 4 ? " ..." : "")
|
(triplet.source.value.split(/\s+/).length > 4 ? " ..." : "")
|
||||||
: "",
|
: "",
|
||||||
size: isStatementNode ? size : size / 2, // Statement nodes slightly larger
|
size: isEpisodeNode ? size : size / 2, // Episode nodes slightly larger
|
||||||
color: nodeColor,
|
color: nodeColor,
|
||||||
x: width,
|
x: width,
|
||||||
y: height,
|
y: height,
|
||||||
nodeData: triplet.source,
|
nodeData: triplet.source,
|
||||||
clusterId: triplet.source.clusterId,
|
clusterId: triplet.source.clusterId,
|
||||||
// Enhanced border for visual appeal, thicker for Statement nodes
|
// Enhanced border for visual appeal, thicker for Episode nodes
|
||||||
borderSize: 1,
|
borderSize: 1,
|
||||||
borderColor: nodeColor,
|
borderColor: nodeColor,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
if (!nodeMap.has(triplet.target.id)) {
|
if (!nodeMap.has(triplet.target.id)) {
|
||||||
const nodeColor = getNodeColor(triplet.target);
|
const nodeColor = getNodeColor(triplet.target);
|
||||||
const isStatementNode =
|
const isEpisodeNode =
|
||||||
triplet.target.attributes?.nodeType === "Statement" ||
|
triplet.target.attributes?.nodeType === "Episode" ||
|
||||||
(triplet.target.labels &&
|
(triplet.target.labels &&
|
||||||
triplet.target.labels.includes("Statement"));
|
triplet.target.labels.includes("Episode"));
|
||||||
|
|
||||||
nodeMap.set(triplet.target.id, {
|
nodeMap.set(triplet.target.id, {
|
||||||
id: triplet.target.id,
|
id: triplet.target.id,
|
||||||
@ -264,13 +264,13 @@ export const GraphClustering = forwardRef<
|
|||||||
? triplet.target.value.split(/\s+/).slice(0, 4).join(" ") +
|
? triplet.target.value.split(/\s+/).slice(0, 4).join(" ") +
|
||||||
(triplet.target.value.split(/\s+/).length > 4 ? " ..." : "")
|
(triplet.target.value.split(/\s+/).length > 4 ? " ..." : "")
|
||||||
: "",
|
: "",
|
||||||
size: isStatementNode ? size : size / 2, // Statement nodes slightly larger
|
size: isEpisodeNode ? size : size / 2, // Episode nodes slightly larger
|
||||||
color: nodeColor,
|
color: nodeColor,
|
||||||
x: width,
|
x: width,
|
||||||
y: height,
|
y: height,
|
||||||
nodeData: triplet.target,
|
nodeData: triplet.target,
|
||||||
clusterId: triplet.target.clusterId,
|
clusterId: triplet.target.clusterId,
|
||||||
// Enhanced border for visual appeal, thicker for Statement nodes
|
// Enhanced border for visual appeal, thicker for Episode nodes
|
||||||
borderSize: 1,
|
borderSize: 1,
|
||||||
borderColor: nodeColor,
|
borderColor: nodeColor,
|
||||||
});
|
});
|
||||||
@ -294,9 +294,9 @@ export const GraphClustering = forwardRef<
|
|||||||
target: triplet.target.id,
|
target: triplet.target.id,
|
||||||
relations: [],
|
relations: [],
|
||||||
relationData: [],
|
relationData: [],
|
||||||
label: "",
|
label: triplet.relation.value, // Show edge type (predicate for Subject->Object)
|
||||||
color: "#0000001A",
|
color: "#0000001A",
|
||||||
labelColor: "#0000001A",
|
labelColor: "#000000",
|
||||||
size: 1,
|
size: 1,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@ -327,13 +327,13 @@ export const GraphClustering = forwardRef<
|
|||||||
graph.forEachNode((node) => {
|
graph.forEachNode((node) => {
|
||||||
const nodeData = graph.getNodeAttribute(node, "nodeData");
|
const nodeData = graph.getNodeAttribute(node, "nodeData");
|
||||||
const originalColor = getNodeColor(nodeData);
|
const originalColor = getNodeColor(nodeData);
|
||||||
const isStatementNode =
|
const isEpisodeNode =
|
||||||
nodeData?.attributes.nodeType === "Statement" ||
|
nodeData?.attributes.nodeType === "Episode" ||
|
||||||
(nodeData?.labels && nodeData.labels.includes("Statement"));
|
(nodeData?.labels && nodeData.labels.includes("Episode"));
|
||||||
|
|
||||||
graph.setNodeAttribute(node, "highlighted", false);
|
graph.setNodeAttribute(node, "highlighted", false);
|
||||||
graph.setNodeAttribute(node, "color", originalColor);
|
graph.setNodeAttribute(node, "color", originalColor);
|
||||||
graph.setNodeAttribute(node, "size", isStatementNode ? size : size / 2);
|
graph.setNodeAttribute(node, "size", isEpisodeNode ? size : size / 2);
|
||||||
graph.setNodeAttribute(node, "zIndex", 1);
|
graph.setNodeAttribute(node, "zIndex", 1);
|
||||||
});
|
});
|
||||||
graph.forEachEdge((edge) => {
|
graph.forEachEdge((edge) => {
|
||||||
@ -551,19 +551,19 @@ export const GraphClustering = forwardRef<
|
|||||||
|
|
||||||
// Apply layout
|
// Apply layout
|
||||||
if (graph.order > 0) {
|
if (graph.order > 0) {
|
||||||
// Strong cluster-based positioning for Statement nodes only
|
// Strong cluster-based positioning for Episode nodes only
|
||||||
const clusterNodeMap = new Map<string, string[]>();
|
const clusterNodeMap = new Map<string, string[]>();
|
||||||
const entityNodes: string[] = [];
|
const entityNodes: string[] = [];
|
||||||
|
|
||||||
// Group Statement nodes by their cluster ID, separate Entity nodes
|
// Group Episode nodes by their cluster ID, separate Entity nodes
|
||||||
graph.forEachNode((nodeId, attributes) => {
|
graph.forEachNode((nodeId, attributes) => {
|
||||||
const isStatementNode =
|
const isEpisodeNode =
|
||||||
attributes.nodeData?.nodeType === "Statement" ||
|
attributes.nodeData?.nodeType === "Episode" ||
|
||||||
(attributes.nodeData?.labels &&
|
(attributes.nodeData?.labels &&
|
||||||
attributes.nodeData.labels.includes("Statement"));
|
attributes.nodeData.labels.includes("Episode"));
|
||||||
|
|
||||||
if (isStatementNode && attributes.clusterId) {
|
if (isEpisodeNode && attributes.clusterId) {
|
||||||
// Statement nodes with cluster IDs go into clusters
|
// Episode nodes with cluster IDs go into clusters
|
||||||
if (!clusterNodeMap.has(attributes.clusterId)) {
|
if (!clusterNodeMap.has(attributes.clusterId)) {
|
||||||
clusterNodeMap.set(attributes.clusterId, []);
|
clusterNodeMap.set(attributes.clusterId, []);
|
||||||
}
|
}
|
||||||
@ -640,7 +640,7 @@ export const GraphClustering = forwardRef<
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Position Entity nodes using ForceAtlas2 natural positioning
|
// Position Entity nodes using ForceAtlas2 natural positioning
|
||||||
// They will be positioned by the algorithm based on their connections to Statement nodes
|
// They will be positioned by the algorithm based on their connections to Episode nodes
|
||||||
entityNodes.forEach((nodeId) => {
|
entityNodes.forEach((nodeId) => {
|
||||||
// Give them initial random positions, ForceAtlas2 will adjust based on connections
|
// Give them initial random positions, ForceAtlas2 will adjust based on connections
|
||||||
graph.setNodeAttribute(nodeId, "x", Math.random() * width);
|
graph.setNodeAttribute(nodeId, "x", Math.random() * width);
|
||||||
@ -689,7 +689,7 @@ export const GraphClustering = forwardRef<
|
|||||||
defaultDrawNodeHover: drawHover,
|
defaultDrawNodeHover: drawHover,
|
||||||
|
|
||||||
maxCameraRatio: 2,
|
maxCameraRatio: 2,
|
||||||
allowInvalidContainer: false,
|
allowInvalidContainer: true,
|
||||||
});
|
});
|
||||||
|
|
||||||
sigmaRef.current = sigma;
|
sigmaRef.current = sigma;
|
||||||
|
|||||||
@ -114,36 +114,6 @@ export const GraphVisualization = forwardRef<GraphRef, GraphVisualizationProps>(
|
|||||||
return (
|
return (
|
||||||
<div className={className}>
|
<div className={className}>
|
||||||
{/* Entity Types Legend Button */}
|
{/* Entity Types Legend Button */}
|
||||||
<div className="absolute top-4 left-4 z-50">
|
|
||||||
{/* <HoverCard>
|
|
||||||
<HoverCardTrigger asChild>
|
|
||||||
<button className="bg-primary/10 text-primary hover:bg-primary/20 rounded-md px-2.5 py-1 text-xs transition-colors">
|
|
||||||
Entity Types
|
|
||||||
</button>
|
|
||||||
</HoverCardTrigger>
|
|
||||||
<HoverCardContent className="w-40" side="bottom" align="start">
|
|
||||||
<div className="space-y-2">
|
|
||||||
<div className="max-h-[300px] space-y-1.5 overflow-y-auto pr-2">
|
|
||||||
{allLabels.map((label) => (
|
|
||||||
<div key={label} className="flex items-center gap-2">
|
|
||||||
<div
|
|
||||||
className="h-4 w-4 flex-shrink-0 rounded-full"
|
|
||||||
style={{
|
|
||||||
backgroundColor: getNodeColor(
|
|
||||||
label,
|
|
||||||
isDarkMode,
|
|
||||||
sharedLabelColorMap,
|
|
||||||
),
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
<span className="text-xs">{label}</span>
|
|
||||||
</div>
|
|
||||||
))}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</HoverCardContent>
|
|
||||||
</HoverCard> */}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{triplets.length > 0 ? (
|
{triplets.length > 0 ? (
|
||||||
<Graph
|
<Graph
|
||||||
|
|||||||
@ -284,52 +284,52 @@ export const Graph = forwardRef<GraphRef, GraphProps>(
|
|||||||
// More nodes = need more space to prevent overcrowding
|
// More nodes = need more space to prevent overcrowding
|
||||||
let scalingRatio: number;
|
let scalingRatio: number;
|
||||||
if (nodeCount < 10) {
|
if (nodeCount < 10) {
|
||||||
scalingRatio = 15; // Tight for small graphs
|
scalingRatio = 20; // Slightly wider for small graphs
|
||||||
} else if (nodeCount < 50) {
|
} else if (nodeCount < 50) {
|
||||||
scalingRatio = 20 + (nodeCount - 10) * 0.5; // Gradual increase
|
scalingRatio = 30 + (nodeCount - 10) * 1.0; // Faster increase
|
||||||
} else if (nodeCount < 200) {
|
} else if (nodeCount < 200) {
|
||||||
scalingRatio = 40 + (nodeCount - 50) * 0.2; // Slower increase
|
scalingRatio = 70 + (nodeCount - 50) * 0.5; // More spread
|
||||||
|
} else if (nodeCount < 500) {
|
||||||
|
scalingRatio = 145 + (nodeCount - 200) * 0.3; // Continue spreading
|
||||||
} else {
|
} else {
|
||||||
scalingRatio = Math.min(80, 70 + (nodeCount - 200) * 0.05); // Cap at 80
|
scalingRatio = Math.min(300, 235 + (nodeCount - 500) * 0.1); // Cap at 300
|
||||||
}
|
}
|
||||||
|
|
||||||
// Calculate optimal gravity based on density and node count
|
// Calculate optimal gravity based on density and node count
|
||||||
let gravity: number;
|
let gravity: number;
|
||||||
if (density > 0.3) {
|
if (density > 0.3) {
|
||||||
// Dense graphs need less gravity to prevent overcrowding
|
// Dense graphs need less gravity to prevent overcrowding
|
||||||
gravity = 1 + density * 2;
|
gravity = 0.5 + density * 1.5;
|
||||||
} else if (density > 0.1) {
|
} else if (density > 0.1) {
|
||||||
// Medium density graphs
|
// Medium density graphs
|
||||||
gravity = 3 + density * 5;
|
gravity = 2 + density * 3;
|
||||||
} else {
|
} else {
|
||||||
// Sparse graphs need more gravity to keep components together
|
// Sparse graphs need more gravity to keep components together
|
||||||
gravity = Math.min(8, 5 + (1 - density) * 3);
|
gravity = Math.min(6, 4 + (1 - density) * 2);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Adjust gravity based on node count
|
// Adjust gravity based on node count - more aggressive reduction for large graphs
|
||||||
if (nodeCount < 20) {
|
if (nodeCount < 20) {
|
||||||
gravity *= 1.5; // Smaller graphs benefit from stronger gravity
|
gravity *= 1.5; // Smaller graphs benefit from stronger gravity
|
||||||
} else if (nodeCount > 100) {
|
} else if (nodeCount > 100) {
|
||||||
gravity *= 0.8; // Larger graphs need gentler gravity
|
gravity *= 0.5; // Larger graphs need much gentler gravity
|
||||||
|
} else if (nodeCount > 200) {
|
||||||
|
gravity *= 0.3; // Very large graphs need very gentle gravity
|
||||||
}
|
}
|
||||||
|
|
||||||
// Calculate iterations based on complexity
|
// Calculate iterations based on complexity
|
||||||
const complexity = nodeCount + edgeCount;
|
const complexity = nodeCount + edgeCount;
|
||||||
let iterations: number;
|
let iterations: number;
|
||||||
if (complexity < 50) {
|
if (complexity < 500) {
|
||||||
iterations = 400;
|
iterations = complexity;
|
||||||
} else if (complexity < 200) {
|
|
||||||
iterations = 600;
|
|
||||||
} else if (complexity < 500) {
|
|
||||||
iterations = 800;
|
|
||||||
} else {
|
} else {
|
||||||
iterations = Math.min(1200, 1000 + complexity * 0.2);
|
iterations = Math.min(600, 500 + complexity * 0.2);
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
scalingRatio: Math.round(scalingRatio * 10) / 10,
|
scalingRatio: Math.round(scalingRatio * 10) / 10,
|
||||||
gravity: Math.round(gravity * 10) / 10,
|
gravity: Math.round(gravity * 10) / 10,
|
||||||
iterations: Math.round(iterations),
|
iterations: Math.round(complexity),
|
||||||
};
|
};
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
@ -378,10 +378,10 @@ export const Graph = forwardRef<GraphRef, GraphProps>(
|
|||||||
settings: {
|
settings: {
|
||||||
...settings,
|
...settings,
|
||||||
barnesHutOptimize: true,
|
barnesHutOptimize: true,
|
||||||
strongGravityMode: true,
|
strongGravityMode: false, // Disable strong gravity for more spread
|
||||||
gravity: optimalParams.gravity,
|
gravity: optimalParams.gravity,
|
||||||
scalingRatio: optimalParams.scalingRatio,
|
scalingRatio: optimalParams.scalingRatio,
|
||||||
slowDown: 3,
|
slowDown: 1.5, // Reduced slowDown for better spreading
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -407,6 +407,7 @@ export const Graph = forwardRef<GraphRef, GraphProps>(
|
|||||||
enableEdgeEvents: true,
|
enableEdgeEvents: true,
|
||||||
minCameraRatio: 0.1,
|
minCameraRatio: 0.1,
|
||||||
maxCameraRatio: 2,
|
maxCameraRatio: 2,
|
||||||
|
allowInvalidContainer: true,
|
||||||
});
|
});
|
||||||
|
|
||||||
sigmaRef.current = sigma;
|
sigmaRef.current = sigma;
|
||||||
|
|||||||
@ -16,7 +16,7 @@ export function SpaceSearch({
|
|||||||
triplets,
|
triplets,
|
||||||
searchQuery,
|
searchQuery,
|
||||||
onSearchChange,
|
onSearchChange,
|
||||||
placeholder = "Search in statement facts...",
|
placeholder = "Search in episodes...",
|
||||||
}: SpaceSearchProps) {
|
}: SpaceSearchProps) {
|
||||||
const [inputValue, setInputValue] = useState(searchQuery);
|
const [inputValue, setInputValue] = useState(searchQuery);
|
||||||
|
|
||||||
@ -30,41 +30,42 @@ export function SpaceSearch({
|
|||||||
}
|
}
|
||||||
}, [debouncedSearchQuery, searchQuery, onSearchChange]);
|
}, [debouncedSearchQuery, searchQuery, onSearchChange]);
|
||||||
|
|
||||||
// Helper to determine if a node is a statement
|
// Helper to determine if a node is an episode
|
||||||
const isStatementNode = useCallback((node: any) => {
|
const isEpisodeNode = useCallback((node: any) => {
|
||||||
// Check if node has a fact attribute (indicates it's a statement)
|
// Check if node has content attribute (indicates it's an episode)
|
||||||
return (
|
return (
|
||||||
node.attributes?.fact ||
|
node.attributes?.content ||
|
||||||
(node.labels && node.labels.includes("Statement"))
|
node.attributes?.episodeUuid ||
|
||||||
|
(node.labels && node.labels.includes("Episode"))
|
||||||
);
|
);
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
// Count statement nodes that match the search
|
// Count episode nodes that match the search
|
||||||
const matchingStatements = useMemo(() => {
|
const matchingEpisodes = useMemo(() => {
|
||||||
if (!debouncedSearchQuery.trim()) return 0;
|
if (!debouncedSearchQuery.trim()) return 0;
|
||||||
|
|
||||||
const query = debouncedSearchQuery.toLowerCase();
|
const query = debouncedSearchQuery.toLowerCase();
|
||||||
const statements: Record<string, number> = {};
|
const episodes: Record<string, number> = {};
|
||||||
|
|
||||||
triplets.forEach((triplet) => {
|
triplets.forEach((triplet) => {
|
||||||
// Check if source node is a statement and matches
|
// Check if source node is an episode and matches
|
||||||
if (
|
if (
|
||||||
isStatementNode(triplet.sourceNode) &&
|
isEpisodeNode(triplet.sourceNode) &&
|
||||||
triplet.sourceNode.attributes?.fact?.toLowerCase().includes(query)
|
triplet.sourceNode.attributes?.content?.toLowerCase().includes(query)
|
||||||
) {
|
) {
|
||||||
statements[triplet.sourceNode.uuid] = 1;
|
episodes[triplet.sourceNode.uuid] = 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if target node is a statement and matches
|
// Check if target node is an episode and matches
|
||||||
if (
|
if (
|
||||||
isStatementNode(triplet.targetNode) &&
|
isEpisodeNode(triplet.targetNode) &&
|
||||||
triplet.targetNode.attributes?.fact?.toLowerCase().includes(query)
|
triplet.targetNode.attributes?.content?.toLowerCase().includes(query)
|
||||||
) {
|
) {
|
||||||
statements[triplet.targetNode.uuid] = 1;
|
episodes[triplet.targetNode.uuid] = 1;
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
return Object.keys(statements).length;
|
return Object.keys(episodes).length;
|
||||||
}, [triplets, debouncedSearchQuery]);
|
}, [triplets, debouncedSearchQuery]);
|
||||||
|
|
||||||
const handleInputChange = (event: React.ChangeEvent<HTMLInputElement>) => {
|
const handleInputChange = (event: React.ChangeEvent<HTMLInputElement>) => {
|
||||||
@ -104,7 +105,7 @@ export function SpaceSearch({
|
|||||||
{/* Show search results count */}
|
{/* Show search results count */}
|
||||||
{debouncedSearchQuery.trim() && (
|
{debouncedSearchQuery.trim() && (
|
||||||
<div className="text-muted-foreground shrink-0 text-sm">
|
<div className="text-muted-foreground shrink-0 text-sm">
|
||||||
{matchingStatements} statement{matchingStatements !== 1 ? "s" : ""}
|
{matchingEpisodes} episode{matchingEpisodes !== 1 ? "s" : ""}
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@ -1,13 +1,15 @@
|
|||||||
import { useState, useEffect, ReactNode } from "react";
|
import { useState, useEffect, type ReactNode } from "react";
|
||||||
import { useFetcher } from "@remix-run/react";
|
import { useFetcher } from "@remix-run/react";
|
||||||
import { AlertCircle, Loader2 } from "lucide-react";
|
import { AlertCircle, File, Loader2, MessageSquare } from "lucide-react";
|
||||||
import { Dialog, DialogContent, DialogHeader, DialogTitle } from "../ui/dialog";
|
|
||||||
import { Badge, BadgeColor } from "../ui/badge";
|
import { Badge, BadgeColor } from "../ui/badge";
|
||||||
import { type LogItem } from "~/hooks/use-logs";
|
import { type LogItem } from "~/hooks/use-logs";
|
||||||
import Markdown from "react-markdown";
|
import Markdown from "react-markdown";
|
||||||
import { getIconForAuthorise } from "../icon-utils";
|
import { getIconForAuthorise } from "../icon-utils";
|
||||||
import { cn } from "~/lib/utils";
|
import { cn, formatString } from "~/lib/utils";
|
||||||
import { getStatusColor } from "./utils";
|
import { getStatusColor } from "./utils";
|
||||||
|
import { format } from "date-fns";
|
||||||
|
import { SpaceDropdown } from "../spaces/space-dropdown";
|
||||||
|
import { StyledMarkdown } from "../common/styled-markdown";
|
||||||
|
|
||||||
interface LogDetailsProps {
|
interface LogDetailsProps {
|
||||||
log: LogItem;
|
log: LogItem;
|
||||||
@ -33,25 +35,29 @@ function PropertyItem({
|
|||||||
if (!value) return null;
|
if (!value) return null;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="flex items-center py-1">
|
<div className="flex items-center py-1 !text-base">
|
||||||
<span className="text-muted-foreground min-w-[160px]">{label}</span>
|
<span className="text-muted-foreground min-w-[120px]">{label}</span>
|
||||||
|
|
||||||
{variant === "status" ? (
|
{variant === "status" ? (
|
||||||
<Badge
|
<Badge
|
||||||
className={cn(
|
className={cn(
|
||||||
"!bg-grayAlpha-100 text-muted-foreground h-7 rounded px-4 text-xs",
|
"text-foreground h-7 items-center gap-2 rounded !bg-transparent px-4.5 !text-base",
|
||||||
className,
|
className,
|
||||||
)}
|
)}
|
||||||
>
|
>
|
||||||
{statusColor && (
|
{statusColor && (
|
||||||
<BadgeColor className={cn(statusColor, "h-2.5 w-2.5")} />
|
<BadgeColor className={cn(statusColor, "h-2.5 w-2.5")} />
|
||||||
)}
|
)}
|
||||||
{typeof value === "string"
|
{value}
|
||||||
? value.charAt(0).toUpperCase() + value.slice(1).toLowerCase()
|
|
||||||
: value}
|
|
||||||
</Badge>
|
</Badge>
|
||||||
) : (
|
) : (
|
||||||
<Badge variant={variant} className={cn("h-7 rounded px-4", className)}>
|
<Badge
|
||||||
|
variant={variant}
|
||||||
|
className={cn(
|
||||||
|
"h-7 items-center gap-2 rounded bg-transparent px-4 !text-base",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
>
|
||||||
{icon}
|
{icon}
|
||||||
{value}
|
{value}
|
||||||
</Badge>
|
</Badge>
|
||||||
@ -73,6 +79,14 @@ interface EpisodeFactsResponse {
|
|||||||
invalidFacts: EpisodeFact[];
|
invalidFacts: EpisodeFact[];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function getStatusValue(status: string) {
|
||||||
|
if (status === "PENDING") {
|
||||||
|
return formatString("IN QUEUE");
|
||||||
|
}
|
||||||
|
|
||||||
|
return formatString(status);
|
||||||
|
}
|
||||||
|
|
||||||
export function LogDetails({ log }: LogDetailsProps) {
|
export function LogDetails({ log }: LogDetailsProps) {
|
||||||
const [facts, setFacts] = useState<any[]>([]);
|
const [facts, setFacts] = useState<any[]>([]);
|
||||||
const [invalidFacts, setInvalidFacts] = useState<any[]>([]);
|
const [invalidFacts, setInvalidFacts] = useState<any[]>([]);
|
||||||
@ -81,33 +95,35 @@ export function LogDetails({ log }: LogDetailsProps) {
|
|||||||
|
|
||||||
// Fetch episode facts when dialog opens and episodeUUID exists
|
// Fetch episode facts when dialog opens and episodeUUID exists
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (facts.length === 0) {
|
if (log.data?.type === "DOCUMENT" && log.data?.episodes?.length > 0) {
|
||||||
if (log.data?.type === "DOCUMENT" && log.data?.episodes?.length > 0) {
|
setFactsLoading(true);
|
||||||
setFactsLoading(true);
|
setFacts([]);
|
||||||
// Fetch facts for all episodes in DOCUMENT type
|
// Fetch facts for all episodes in DOCUMENT type
|
||||||
Promise.all(
|
Promise.all(
|
||||||
log.data.episodes.map((episodeId: string) =>
|
log.data.episodes.map((episodeId: string) =>
|
||||||
fetch(`/api/v1/episodes/${episodeId}/facts`).then((res) =>
|
fetch(`/api/v1/episodes/${episodeId}/facts`).then((res) =>
|
||||||
res.json(),
|
res.json(),
|
||||||
),
|
|
||||||
),
|
),
|
||||||
)
|
),
|
||||||
.then((results) => {
|
)
|
||||||
const allFacts = results.flatMap((result) => result.facts || []);
|
.then((results) => {
|
||||||
const allInvalidFacts = results.flatMap(
|
const allFacts = results.flatMap((result) => result.facts || []);
|
||||||
(result) => result.invalidFacts || [],
|
const allInvalidFacts = results.flatMap(
|
||||||
);
|
(result) => result.invalidFacts || [],
|
||||||
setFacts(allFacts);
|
);
|
||||||
setInvalidFacts(allInvalidFacts);
|
setFacts(allFacts);
|
||||||
setFactsLoading(false);
|
setInvalidFacts(allInvalidFacts);
|
||||||
})
|
setFactsLoading(false);
|
||||||
.catch(() => {
|
})
|
||||||
setFactsLoading(false);
|
.catch(() => {
|
||||||
});
|
setFactsLoading(false);
|
||||||
} else if (log.episodeUUID) {
|
});
|
||||||
setFactsLoading(true);
|
} else if (log.episodeUUID) {
|
||||||
fetcher.load(`/api/v1/episodes/${log.episodeUUID}/facts`);
|
setFactsLoading(true);
|
||||||
}
|
fetcher.load(`/api/v1/episodes/${log.episodeUUID}/facts`);
|
||||||
|
} else {
|
||||||
|
setFacts([]);
|
||||||
|
setInvalidFacts([]);
|
||||||
}
|
}
|
||||||
}, [log.episodeUUID, log.data?.type, log.data?.episodes, facts.length]);
|
}, [log.episodeUUID, log.data?.type, log.data?.episodes, facts.length]);
|
||||||
|
|
||||||
@ -122,43 +138,10 @@ export function LogDetails({ log }: LogDetailsProps) {
|
|||||||
}, [fetcher.data, fetcher.state]);
|
}, [fetcher.data, fetcher.state]);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="flex w-full flex-col items-center">
|
<div className="flex h-full w-full flex-col items-center overflow-auto">
|
||||||
<div className="w-4xl">
|
<div className="max-w-4xl">
|
||||||
<div className="px-4 pt-4">
|
<div className="mt-5 mb-5 px-4">
|
||||||
<div className="mb-4 flex w-full items-center justify-between">
|
<div className="space-y-1">
|
||||||
<span>Episode Details</span>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="mb-10 px-4">
|
|
||||||
<div className="space-y-3">
|
|
||||||
{log.data?.type === "DOCUMENT" && log.data?.episodes ? (
|
|
||||||
<PropertyItem
|
|
||||||
label="Episodes"
|
|
||||||
value={
|
|
||||||
<div className="flex flex-wrap gap-1">
|
|
||||||
{log.data.episodes.map(
|
|
||||||
(episodeId: string, index: number) => (
|
|
||||||
<Badge
|
|
||||||
key={index}
|
|
||||||
variant="outline"
|
|
||||||
className="text-xs"
|
|
||||||
>
|
|
||||||
{episodeId}
|
|
||||||
</Badge>
|
|
||||||
),
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
}
|
|
||||||
variant="secondary"
|
|
||||||
/>
|
|
||||||
) : (
|
|
||||||
<PropertyItem
|
|
||||||
label="Episode Id"
|
|
||||||
value={log.episodeUUID}
|
|
||||||
variant="secondary"
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
<PropertyItem
|
<PropertyItem
|
||||||
label="Session Id"
|
label="Session Id"
|
||||||
value={log.data?.sessionId?.toLowerCase()}
|
value={log.data?.sessionId?.toLowerCase()}
|
||||||
@ -166,14 +149,21 @@ export function LogDetails({ log }: LogDetailsProps) {
|
|||||||
/>
|
/>
|
||||||
<PropertyItem
|
<PropertyItem
|
||||||
label="Type"
|
label="Type"
|
||||||
value={
|
value={formatString(
|
||||||
log.data?.type ? log.data.type.toLowerCase() : "conversation"
|
log.data?.type ? log.data.type.toLowerCase() : "conversation",
|
||||||
|
)}
|
||||||
|
icon={
|
||||||
|
log.data?.type === "CONVERSATION" ? (
|
||||||
|
<MessageSquare size={16} />
|
||||||
|
) : (
|
||||||
|
<File size={16} />
|
||||||
|
)
|
||||||
}
|
}
|
||||||
variant="secondary"
|
variant="secondary"
|
||||||
/>
|
/>
|
||||||
<PropertyItem
|
<PropertyItem
|
||||||
label="Source"
|
label="Source"
|
||||||
value={log.source?.toLowerCase()}
|
value={formatString(log.source?.toLowerCase())}
|
||||||
icon={
|
icon={
|
||||||
log.source &&
|
log.source &&
|
||||||
getIconForAuthorise(log.source.toLowerCase(), 16, undefined)
|
getIconForAuthorise(log.source.toLowerCase(), 16, undefined)
|
||||||
@ -183,19 +173,32 @@ export function LogDetails({ log }: LogDetailsProps) {
|
|||||||
|
|
||||||
<PropertyItem
|
<PropertyItem
|
||||||
label="Status"
|
label="Status"
|
||||||
value={log.status}
|
value={getStatusValue(log.status)}
|
||||||
variant="status"
|
variant="status"
|
||||||
statusColor={log.status && getStatusColor(log.status)}
|
statusColor={log.status && getStatusColor(log.status)}
|
||||||
/>
|
/>
|
||||||
|
|
||||||
|
{/* Space Assignment for CONVERSATION type */}
|
||||||
|
{log.data.type.toLowerCase() === "conversation" &&
|
||||||
|
log?.episodeUUID && (
|
||||||
|
<div className="mt-2 flex items-start py-1">
|
||||||
|
<span className="text-muted-foreground min-w-[120px]">
|
||||||
|
Spaces
|
||||||
|
</span>
|
||||||
|
|
||||||
|
<SpaceDropdown
|
||||||
|
className="px-3"
|
||||||
|
episodeIds={[log.episodeUUID]}
|
||||||
|
selectedSpaceIds={log.spaceIds || []}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* Error Details */}
|
{/* Error Details */}
|
||||||
{log.error && (
|
{log.error && (
|
||||||
<div className="mb-6 px-4">
|
<div className="mb-6 px-4">
|
||||||
<div className="mb-2 flex w-full items-center justify-between">
|
|
||||||
<span>Error Details</span>
|
|
||||||
</div>
|
|
||||||
<div className="bg-destructive/10 rounded-md p-3">
|
<div className="bg-destructive/10 rounded-md p-3">
|
||||||
<div className="flex items-start gap-2 text-red-600">
|
<div className="flex items-start gap-2 text-red-600">
|
||||||
<AlertCircle className="mt-0.5 h-4 w-4 flex-shrink-0" />
|
<AlertCircle className="mt-0.5 h-4 w-4 flex-shrink-0" />
|
||||||
@ -207,9 +210,63 @@ export function LogDetails({ log }: LogDetailsProps) {
|
|||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
|
{log.data?.type === "CONVERSATION" && (
|
||||||
|
<div className="flex flex-col items-center p-4 pt-0">
|
||||||
|
{/* Log Content */}
|
||||||
|
<div className="mb-4 w-full break-words whitespace-pre-wrap">
|
||||||
|
<div className="rounded-md">
|
||||||
|
<StyledMarkdown>{log.ingestText}</StyledMarkdown>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Episodes List for DOCUMENT type */}
|
||||||
|
{log.data?.type === "DOCUMENT" && log.episodeDetails?.length > 0 && (
|
||||||
|
<div className="mb-6 px-4">
|
||||||
|
<div className="mb-2 flex w-full items-center justify-between font-medium">
|
||||||
|
<span>Episodes ({log.episodeDetails.length})</span>
|
||||||
|
</div>
|
||||||
|
<div className="flex flex-col gap-3">
|
||||||
|
{log.episodeDetails.map((episode: any, index: number) => (
|
||||||
|
<div
|
||||||
|
key={episode.uuid}
|
||||||
|
className="bg-grayAlpha-100 flex flex-col gap-3 rounded-md p-3"
|
||||||
|
>
|
||||||
|
<div className="flex items-start gap-3">
|
||||||
|
<div className="flex min-w-0 flex-1 flex-col gap-1">
|
||||||
|
<span className="text-muted-foreground text-xs">
|
||||||
|
Episode {index + 1}
|
||||||
|
</span>
|
||||||
|
<span className="truncate font-mono text-xs">
|
||||||
|
{episode.uuid}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<SpaceDropdown
|
||||||
|
episodeIds={[episode.uuid]}
|
||||||
|
selectedSpaceIds={episode.spaceIds || []}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{/* Episode Content */}
|
||||||
|
<div className="border-grayAlpha-200 border-t pt-3">
|
||||||
|
<div className="text-muted-foreground mb-1 text-xs">
|
||||||
|
Content
|
||||||
|
</div>
|
||||||
|
<div className="text-sm break-words whitespace-pre-wrap">
|
||||||
|
<StyledMarkdown>{episode.content}</StyledMarkdown>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
{/* Episode Facts */}
|
{/* Episode Facts */}
|
||||||
<div className="mb-6 px-4">
|
<div className="mb-6 px-4">
|
||||||
<div className="mb-2 flex w-full items-center justify-between">
|
<div className="mb-2 flex w-full items-center justify-between font-medium">
|
||||||
<span>Facts</span>
|
<span>Facts</span>
|
||||||
</div>
|
</div>
|
||||||
<div className="rounded-md">
|
<div className="rounded-md">
|
||||||
@ -218,20 +275,21 @@ export function LogDetails({ log }: LogDetailsProps) {
|
|||||||
<Loader2 className="h-4 w-4 animate-spin" />
|
<Loader2 className="h-4 w-4 animate-spin" />
|
||||||
</div>
|
</div>
|
||||||
) : facts.length > 0 ? (
|
) : facts.length > 0 ? (
|
||||||
<div className="flex flex-col gap-2">
|
<div className="flex flex-col gap-1">
|
||||||
{facts.map((fact) => (
|
{facts.map((fact) => (
|
||||||
<div
|
<div
|
||||||
key={fact.uuid}
|
key={fact.uuid}
|
||||||
className="bg-grayAlpha-100 rounded-md p-3"
|
className="bg-grayAlpha-100 flex items-center justify-between gap-2 rounded-md p-3"
|
||||||
>
|
>
|
||||||
<p className="mb-1 text-sm">{fact.fact}</p>
|
<p className="text-sm">{fact.fact}</p>
|
||||||
<div className="text-muted-foreground flex items-center gap-2 text-xs">
|
<div className="text-muted-foreground flex shrink-0 items-center gap-2 text-xs">
|
||||||
<span>
|
<span>
|
||||||
Valid: {new Date(fact.validAt).toLocaleString()}
|
Valid: {format(new Date(fact.validAt), "dd/MM/yyyy")}
|
||||||
</span>
|
</span>
|
||||||
{fact.invalidAt && (
|
{fact.invalidAt && (
|
||||||
<span>
|
<span>
|
||||||
Invalid: {new Date(fact.invalidAt).toLocaleString()}
|
Invalid:{" "}
|
||||||
|
{format(new Date(fact.invalidAt), "dd/MM/yyyy")}
|
||||||
</span>
|
</span>
|
||||||
)}
|
)}
|
||||||
{Object.keys(fact.attributes).length > 0 && (
|
{Object.keys(fact.attributes).length > 0 && (
|
||||||
@ -270,15 +328,6 @@ export function LogDetails({ log }: LogDetailsProps) {
|
|||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="flex max-h-[88vh] flex-col items-center overflow-auto p-4 pt-0">
|
|
||||||
{/* Log Content */}
|
|
||||||
<div className="mb-4 text-sm break-words whitespace-pre-wrap">
|
|
||||||
<div className="rounded-md">
|
|
||||||
<Markdown>{log.ingestText}</Markdown>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
|||||||
@ -1,10 +1,4 @@
|
|||||||
import { EllipsisVertical, Trash } from "lucide-react";
|
import { Trash, Copy, RotateCw } from "lucide-react";
|
||||||
import {
|
|
||||||
DropdownMenu,
|
|
||||||
DropdownMenuContent,
|
|
||||||
DropdownMenuItem,
|
|
||||||
DropdownMenuTrigger,
|
|
||||||
} from "../ui/dropdown-menu";
|
|
||||||
import { Button } from "../ui/button";
|
import { Button } from "../ui/button";
|
||||||
import {
|
import {
|
||||||
AlertDialog,
|
AlertDialog,
|
||||||
@ -17,15 +11,19 @@ import {
|
|||||||
AlertDialogTitle,
|
AlertDialogTitle,
|
||||||
} from "../ui/alert-dialog";
|
} from "../ui/alert-dialog";
|
||||||
import { useState, useEffect } from "react";
|
import { useState, useEffect } from "react";
|
||||||
import { redirect, useFetcher } from "@remix-run/react";
|
import { useFetcher, useNavigate } from "@remix-run/react";
|
||||||
|
import { toast } from "~/hooks/use-toast";
|
||||||
|
|
||||||
interface LogOptionsProps {
|
interface LogOptionsProps {
|
||||||
id: string;
|
id: string;
|
||||||
|
status?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const LogOptions = ({ id }: LogOptionsProps) => {
|
export const LogOptions = ({ id, status }: LogOptionsProps) => {
|
||||||
const [deleteDialogOpen, setDeleteDialogOpen] = useState(false);
|
const [deleteDialogOpen, setDeleteDialogOpen] = useState(false);
|
||||||
const deleteFetcher = useFetcher<{ success: boolean }>();
|
const deleteFetcher = useFetcher<{ success: boolean }>();
|
||||||
|
const retryFetcher = useFetcher<{ success: boolean }>();
|
||||||
|
const navigate = useNavigate();
|
||||||
|
|
||||||
const handleDelete = () => {
|
const handleDelete = () => {
|
||||||
deleteFetcher.submit(
|
deleteFetcher.submit(
|
||||||
@ -39,43 +37,84 @@ export const LogOptions = ({ id }: LogOptionsProps) => {
|
|||||||
setDeleteDialogOpen(false);
|
setDeleteDialogOpen(false);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const handleCopy = async () => {
|
||||||
|
try {
|
||||||
|
await navigator.clipboard.writeText(id);
|
||||||
|
toast({
|
||||||
|
title: "Copied",
|
||||||
|
description: "Episode ID copied to clipboard",
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Failed to copy:", err);
|
||||||
|
toast({
|
||||||
|
title: "Error",
|
||||||
|
description: "Failed to copy ID",
|
||||||
|
variant: "destructive",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleRetry = () => {
|
||||||
|
retryFetcher.submit(
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
method: "POST",
|
||||||
|
action: `/api/v1/logs/${id}/retry`,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (deleteFetcher.state === "idle" && deleteFetcher.data?.success) {
|
if (deleteFetcher.state === "idle" && deleteFetcher.data?.success) {
|
||||||
redirect(`/home/inbox`);
|
navigate(`/home/inbox`);
|
||||||
}
|
}
|
||||||
}, [deleteFetcher.state, deleteFetcher.data]);
|
}, [deleteFetcher.state, deleteFetcher.data]);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (retryFetcher.state === "idle" && retryFetcher.data?.success) {
|
||||||
|
toast({
|
||||||
|
title: "Success",
|
||||||
|
description: "Episode retry initiated",
|
||||||
|
});
|
||||||
|
// Reload the page to reflect the new status
|
||||||
|
window.location.reload();
|
||||||
|
}
|
||||||
|
}, [retryFetcher.state, retryFetcher.data]);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<DropdownMenu>
|
<div className="flex items-center gap-2">
|
||||||
<DropdownMenuTrigger
|
{status === "FAILED" && (
|
||||||
asChild
|
<Button
|
||||||
|
variant="secondary"
|
||||||
|
size="sm"
|
||||||
|
className="gap-2 rounded"
|
||||||
|
onClick={handleRetry}
|
||||||
|
disabled={retryFetcher.state !== "idle"}
|
||||||
|
>
|
||||||
|
<RotateCw size={15} /> Retry
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
<Button
|
||||||
|
variant="secondary"
|
||||||
|
size="sm"
|
||||||
|
className="gap-2 rounded"
|
||||||
|
onClick={handleCopy}
|
||||||
|
>
|
||||||
|
<Copy size={15} /> Copy Id
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
variant="secondary"
|
||||||
|
size="sm"
|
||||||
|
className="gap-2 rounded"
|
||||||
onClick={(e) => {
|
onClick={(e) => {
|
||||||
e.stopPropagation();
|
setDeleteDialogOpen(true);
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<Button
|
<Trash size={15} /> Delete
|
||||||
variant="ghost"
|
</Button>
|
||||||
className="mr-0.5 h-8 shrink items-center justify-between gap-2 px-1.5"
|
</div>
|
||||||
>
|
|
||||||
<div className="flex items-center justify-between gap-2">
|
|
||||||
<EllipsisVertical size={16} />
|
|
||||||
</div>
|
|
||||||
</Button>
|
|
||||||
</DropdownMenuTrigger>
|
|
||||||
|
|
||||||
<DropdownMenuContent align="end">
|
|
||||||
<DropdownMenuItem
|
|
||||||
onClick={(e) => {
|
|
||||||
setDeleteDialogOpen(true);
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
<Button variant="link" size="sm" className="gap-2 rounded">
|
|
||||||
<Trash size={15} /> Delete
|
|
||||||
</Button>
|
|
||||||
</DropdownMenuItem>
|
|
||||||
</DropdownMenuContent>
|
|
||||||
</DropdownMenu>
|
|
||||||
<AlertDialog open={deleteDialogOpen} onOpenChange={setDeleteDialogOpen}>
|
<AlertDialog open={deleteDialogOpen} onOpenChange={setDeleteDialogOpen}>
|
||||||
<AlertDialogContent>
|
<AlertDialogContent>
|
||||||
<AlertDialogHeader>
|
<AlertDialogHeader>
|
||||||
|
|||||||
@ -3,7 +3,8 @@ import { Badge, BadgeColor } from "../ui/badge";
|
|||||||
import { type LogItem } from "~/hooks/use-logs";
|
import { type LogItem } from "~/hooks/use-logs";
|
||||||
import { getIconForAuthorise } from "../icon-utils";
|
import { getIconForAuthorise } from "../icon-utils";
|
||||||
import { useNavigate, useParams } from "@remix-run/react";
|
import { useNavigate, useParams } from "@remix-run/react";
|
||||||
import { getStatusColor } from "./utils";
|
import { getStatusColor, getStatusValue } from "./utils";
|
||||||
|
import { File, MessageSquare } from "lucide-react";
|
||||||
|
|
||||||
interface LogTextCollapseProps {
|
interface LogTextCollapseProps {
|
||||||
text?: string;
|
text?: string;
|
||||||
@ -49,9 +50,13 @@ export function LogTextCollapse({ text, log }: LogTextCollapseProps) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const getIngestType = (log: LogItem) => {
|
const getIngestType = (log: LogItem) => {
|
||||||
const type = log.type ?? log.data.type ?? "Conversation";
|
const type = log.type ?? log.data.type ?? "CONVERSATION";
|
||||||
|
|
||||||
return type[0].toUpperCase();
|
return type === "CONVERSATION" ? (
|
||||||
|
<MessageSquare size={14} />
|
||||||
|
) : (
|
||||||
|
<File size={14} />
|
||||||
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@ -69,7 +74,7 @@ export function LogTextCollapse({ text, log }: LogTextCollapseProps) {
|
|||||||
<div className={cn("flex w-full min-w-[0px] shrink flex-col")}>
|
<div className={cn("flex w-full min-w-[0px] shrink flex-col")}>
|
||||||
<div className="flex w-full items-center justify-between gap-4">
|
<div className="flex w-full items-center justify-between gap-4">
|
||||||
<div className="inline-flex min-h-[24px] min-w-[0px] shrink items-center justify-start">
|
<div className="inline-flex min-h-[24px] min-w-[0px] shrink items-center justify-start">
|
||||||
<div className={cn("truncate text-left")}>
|
<div className={cn("truncate text-left text-base")}>
|
||||||
{text.replace(/<[^>]+>/g, "")}
|
{text.replace(/<[^>]+>/g, "")}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@ -83,8 +88,7 @@ export function LogTextCollapse({ text, log }: LogTextCollapseProps) {
|
|||||||
)}
|
)}
|
||||||
>
|
>
|
||||||
<BadgeColor className={cn(getStatusColor(log.status))} />
|
<BadgeColor className={cn(getStatusColor(log.status))} />
|
||||||
{log.status.charAt(0).toUpperCase() +
|
{getStatusValue(log.status)}
|
||||||
log.status.slice(1).toLowerCase()}
|
|
||||||
</Badge>
|
</Badge>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@ -93,7 +97,7 @@ export function LogTextCollapse({ text, log }: LogTextCollapseProps) {
|
|||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="flex items-center justify-between">
|
<div className="flex items-center justify-between">
|
||||||
<div className="flex items-center gap-1">
|
<div className="flex items-center gap-1 font-light">
|
||||||
{getIconForAuthorise(log.source.toLowerCase(), 12, undefined)}
|
{getIconForAuthorise(log.source.toLowerCase(), 12, undefined)}
|
||||||
{log.source.toLowerCase()}
|
{log.source.toLowerCase()}
|
||||||
</div>
|
</div>
|
||||||
@ -101,7 +105,7 @@ export function LogTextCollapse({ text, log }: LogTextCollapseProps) {
|
|||||||
<div className="flex items-center gap-1">
|
<div className="flex items-center gap-1">
|
||||||
<Badge
|
<Badge
|
||||||
className={cn(
|
className={cn(
|
||||||
"!bg-grayAlpha-100 text-muted-foreground rounded text-xs",
|
"text-muted-foreground rounded !bg-transparent text-xs",
|
||||||
)}
|
)}
|
||||||
>
|
>
|
||||||
{getIngestType(log)}
|
{getIngestType(log)}
|
||||||
|
|||||||
@ -1,3 +1,5 @@
|
|||||||
|
import { formatString } from "~/lib/utils";
|
||||||
|
|
||||||
export const getStatusColor = (status: string) => {
|
export const getStatusColor = (status: string) => {
|
||||||
switch (status) {
|
switch (status) {
|
||||||
case "PROCESSING":
|
case "PROCESSING":
|
||||||
@ -14,3 +16,11 @@ export const getStatusColor = (status: string) => {
|
|||||||
return "bg-gray-800";
|
return "bg-gray-800";
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export function getStatusValue(status: string) {
|
||||||
|
if (status === "PENDING") {
|
||||||
|
return formatString("In Queue");
|
||||||
|
}
|
||||||
|
|
||||||
|
return formatString(status);
|
||||||
|
}
|
||||||
|
|||||||
@ -10,6 +10,7 @@ import {
|
|||||||
import { type LogItem } from "~/hooks/use-logs";
|
import { type LogItem } from "~/hooks/use-logs";
|
||||||
import { ScrollManagedList } from "../virtualized-list";
|
import { ScrollManagedList } from "../virtualized-list";
|
||||||
import { LogTextCollapse } from "./log-text-collapse";
|
import { LogTextCollapse } from "./log-text-collapse";
|
||||||
|
import { LoaderCircle } from "lucide-react";
|
||||||
|
|
||||||
interface VirtualLogsListProps {
|
interface VirtualLogsListProps {
|
||||||
logs: LogItem[];
|
logs: LogItem[];
|
||||||
@ -139,7 +140,7 @@ export function VirtualLogsList({
|
|||||||
|
|
||||||
{isLoading && (
|
{isLoading && (
|
||||||
<div className="text-muted-foreground p-4 text-center text-sm">
|
<div className="text-muted-foreground p-4 text-center text-sm">
|
||||||
Loading more logs...
|
<LoaderCircle size={18} className="mr-1 animate-spin" />
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
4
apps/webapp/app/components/onboarding/index.ts
Normal file
4
apps/webapp/app/components/onboarding/index.ts
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
export { OnboardingModal } from "./onboarding-modal";
|
||||||
|
export { Provider, OnboardingStep } from "./types";
|
||||||
|
export type { ProviderConfig, OnboardingState } from "./types";
|
||||||
|
export { PROVIDER_CONFIGS, SUGGESTED_INGESTION_PROMPTS, VERIFICATION_PROMPT } from "./provider-config";
|
||||||
137
apps/webapp/app/components/onboarding/ingestion-step.tsx
Normal file
137
apps/webapp/app/components/onboarding/ingestion-step.tsx
Normal file
@ -0,0 +1,137 @@
|
|||||||
|
import { useState } from "react";
|
||||||
|
import { Copy, Check, Loader2, AlertCircle } from "lucide-react";
|
||||||
|
import { Button } from "../ui";
|
||||||
|
import { SUGGESTED_INGESTION_PROMPTS } from "./provider-config";
|
||||||
|
|
||||||
|
interface IngestionStepProps {
|
||||||
|
providerName: string;
|
||||||
|
ingestionStatus: "idle" | "waiting" | "processing" | "complete" | "error";
|
||||||
|
onStartWaiting: () => void;
|
||||||
|
error?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function IngestionStep({
|
||||||
|
providerName,
|
||||||
|
ingestionStatus,
|
||||||
|
onStartWaiting,
|
||||||
|
error,
|
||||||
|
}: IngestionStepProps) {
|
||||||
|
const [copiedIndex, setCopiedIndex] = useState<number | null>(null);
|
||||||
|
|
||||||
|
const handleCopy = async (text: string, index: number) => {
|
||||||
|
await navigator.clipboard.writeText(text);
|
||||||
|
setCopiedIndex(index);
|
||||||
|
setTimeout(() => setCopiedIndex(null), 2000);
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
<div>
|
||||||
|
<h2 className="mb-2 text-xl font-semibold">
|
||||||
|
Let's Store Your First Memory
|
||||||
|
</h2>
|
||||||
|
<p className="text-muted-foreground text-sm">
|
||||||
|
Copy one of these prompts and paste it into {providerName} to create
|
||||||
|
your first memory
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{ingestionStatus === "idle" && (
|
||||||
|
<>
|
||||||
|
<div className="space-y-3">
|
||||||
|
{SUGGESTED_INGESTION_PROMPTS.map((prompt, index) => (
|
||||||
|
<div
|
||||||
|
key={index}
|
||||||
|
className="group bg-grayAlpha-100 hover:border-primary/50 relative rounded-lg border border-gray-300 p-4 transition-colors"
|
||||||
|
>
|
||||||
|
<p className="pr-10 text-sm">{prompt}</p>
|
||||||
|
<button
|
||||||
|
onClick={() => handleCopy(prompt, index)}
|
||||||
|
className="hover:bg-background absolute top-3 right-3 rounded-md p-2 transition-colors"
|
||||||
|
title="Copy to clipboard"
|
||||||
|
>
|
||||||
|
{copiedIndex === index ? (
|
||||||
|
<Check className="h-4 w-4 text-green-500" />
|
||||||
|
) : (
|
||||||
|
<Copy className="text-muted-foreground h-4 w-4" />
|
||||||
|
)}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="flex items-center justify-between rounded-lg border border-blue-500/20 bg-blue-500/10 p-4">
|
||||||
|
<div className="flex items-start gap-3">
|
||||||
|
<AlertCircle className="mt-0.5 h-5 w-5 text-blue-500" />
|
||||||
|
<div className="text-sm">
|
||||||
|
<p className="font-medium text-blue-700 dark:text-blue-300">
|
||||||
|
Important
|
||||||
|
</p>
|
||||||
|
<p className="text-blue-600 dark:text-blue-400">
|
||||||
|
After pasting the prompt in {providerName}, click the button
|
||||||
|
below to wait for ingestion
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="flex justify-end">
|
||||||
|
<Button onClick={onStartWaiting} size="lg">
|
||||||
|
I've Sent the Prompt
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{(ingestionStatus === "waiting" || ingestionStatus === "processing") && (
|
||||||
|
<div className="flex flex-col items-center justify-center space-y-4 py-12">
|
||||||
|
<Loader2 className="text-primary h-12 w-12 animate-spin" />
|
||||||
|
<div className="space-y-2 text-center">
|
||||||
|
<h3 className="text-lg font-medium">
|
||||||
|
{ingestionStatus === "waiting"
|
||||||
|
? "Waiting for your first ingestion..."
|
||||||
|
: "Processing your memory..."}
|
||||||
|
</h3>
|
||||||
|
<p className="text-muted-foreground max-w-md text-sm">
|
||||||
|
{ingestionStatus === "waiting"
|
||||||
|
? "Make sure you've sent the prompt in your provider app. We're listening for the first memory ingestion."
|
||||||
|
: "We're storing your information. This usually takes a few seconds."}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{ingestionStatus === "complete" && (
|
||||||
|
<div className="flex flex-col items-center justify-center space-y-4 py-12">
|
||||||
|
<div className="flex h-16 w-16 items-center justify-center rounded-full bg-green-500/10">
|
||||||
|
<Check className="h-8 w-8 text-green-500" />
|
||||||
|
</div>
|
||||||
|
<div className="space-y-2 text-center">
|
||||||
|
<h3 className="text-lg font-medium">Memory stored successfully!</h3>
|
||||||
|
<p className="text-muted-foreground text-sm">
|
||||||
|
Your first memory has been ingested. Let's verify it worked.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{ingestionStatus === "error" && (
|
||||||
|
<div className="flex flex-col items-center justify-center space-y-4 py-12">
|
||||||
|
<div className="flex h-16 w-16 items-center justify-center rounded-full bg-red-500/10">
|
||||||
|
<AlertCircle className="h-8 w-8 text-red-500" />
|
||||||
|
</div>
|
||||||
|
<div className="space-y-2 text-center">
|
||||||
|
<h3 className="text-lg font-medium">Something went wrong</h3>
|
||||||
|
<p className="text-muted-foreground max-w-md text-sm">
|
||||||
|
{error ||
|
||||||
|
"We couldn't detect your memory ingestion. Please try again or check your provider connection."}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<Button onClick={onStartWaiting} variant="secondary">
|
||||||
|
Try Again
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
230
apps/webapp/app/components/onboarding/onboarding-modal.tsx
Normal file
230
apps/webapp/app/components/onboarding/onboarding-modal.tsx
Normal file
@ -0,0 +1,230 @@
|
|||||||
|
import { useState } from "react";
|
||||||
|
import { Dialog, DialogContent, DialogHeader, DialogTitle } from "../ui/dialog";
|
||||||
|
import { type Provider, OnboardingStep } from "./types";
|
||||||
|
import { ProviderSelectionStep } from "./provider-selection-step";
|
||||||
|
import { IngestionStep } from "./ingestion-step";
|
||||||
|
import { VerificationStep } from "./verification-step";
|
||||||
|
import { PROVIDER_CONFIGS } from "./provider-config";
|
||||||
|
import { Progress } from "../ui/progress";
|
||||||
|
|
||||||
|
interface OnboardingModalProps {
|
||||||
|
isOpen: boolean;
|
||||||
|
onClose: () => void;
|
||||||
|
onComplete: () => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function OnboardingModal({
|
||||||
|
isOpen,
|
||||||
|
onClose,
|
||||||
|
onComplete,
|
||||||
|
}: OnboardingModalProps) {
|
||||||
|
const [currentStep, setCurrentStep] = useState<OnboardingStep>(
|
||||||
|
OnboardingStep.PROVIDER_SELECTION,
|
||||||
|
);
|
||||||
|
const [selectedProvider, setSelectedProvider] = useState<Provider>();
|
||||||
|
const [ingestionStatus, setIngestionStatus] = useState<
|
||||||
|
"idle" | "waiting" | "processing" | "complete" | "error"
|
||||||
|
>("idle");
|
||||||
|
const [verificationResult, setVerificationResult] = useState<string>();
|
||||||
|
const [isCheckingRecall, setIsCheckingRecall] = useState(false);
|
||||||
|
const [error, setError] = useState<string>();
|
||||||
|
|
||||||
|
// Calculate progress
|
||||||
|
const getProgress = () => {
|
||||||
|
switch (currentStep) {
|
||||||
|
case OnboardingStep.PROVIDER_SELECTION:
|
||||||
|
return 33;
|
||||||
|
case OnboardingStep.FIRST_INGESTION:
|
||||||
|
return 66;
|
||||||
|
case OnboardingStep.VERIFICATION:
|
||||||
|
return 100;
|
||||||
|
default:
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Poll for ingestion status
|
||||||
|
const pollIngestion = async () => {
|
||||||
|
setIngestionStatus("waiting");
|
||||||
|
|
||||||
|
try {
|
||||||
|
const maxAttempts = 30; // 60 seconds (30 * 2s)
|
||||||
|
let attempts = 0;
|
||||||
|
|
||||||
|
// Store the timestamp when polling starts
|
||||||
|
const startTime = Date.now();
|
||||||
|
|
||||||
|
const poll = async (): Promise<boolean> => {
|
||||||
|
if (attempts >= maxAttempts) {
|
||||||
|
throw new Error("Ingestion timeout - please try again");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for new ingestion logs from the last 5 minutes
|
||||||
|
const response = await fetch("/api/v1/logs?limit=1");
|
||||||
|
const data = await response.json();
|
||||||
|
|
||||||
|
// Check if there's a recent ingestion (created after we started polling)
|
||||||
|
if (data.logs && data.logs.length > 0) {
|
||||||
|
const latestLog = data.logs[0];
|
||||||
|
const logTime = new Date(latestLog.time).getTime();
|
||||||
|
|
||||||
|
// If the log was created after we started polling, we found a new ingestion
|
||||||
|
if (logTime >= startTime) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 2000));
|
||||||
|
attempts++;
|
||||||
|
|
||||||
|
return poll();
|
||||||
|
};
|
||||||
|
|
||||||
|
const success = await poll();
|
||||||
|
|
||||||
|
if (success) {
|
||||||
|
setIngestionStatus("complete");
|
||||||
|
// Auto-advance to verification step after 2 seconds
|
||||||
|
setTimeout(() => {
|
||||||
|
setCurrentStep(OnboardingStep.VERIFICATION);
|
||||||
|
}, 2000);
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
setError(err instanceof Error ? err.message : "Unknown error occurred");
|
||||||
|
setIngestionStatus("error");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleProviderSelect = (provider: Provider) => {
|
||||||
|
setSelectedProvider(provider);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleContinueFromProvider = () => {
|
||||||
|
setCurrentStep(OnboardingStep.FIRST_INGESTION);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleStartWaiting = () => {
|
||||||
|
pollIngestion();
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleComplete = () => {
|
||||||
|
setCurrentStep(OnboardingStep.COMPLETE);
|
||||||
|
onComplete();
|
||||||
|
onClose();
|
||||||
|
};
|
||||||
|
|
||||||
|
// Poll for recall logs to detect verification
|
||||||
|
const pollRecallLogs = async () => {
|
||||||
|
setIsCheckingRecall(true);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const maxAttempts = 30; // 60 seconds
|
||||||
|
let attempts = 0;
|
||||||
|
const startTime = Date.now();
|
||||||
|
|
||||||
|
const poll = async (): Promise<string | null> => {
|
||||||
|
if (attempts >= maxAttempts) {
|
||||||
|
throw new Error("Verification timeout - please try again");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for new recall logs
|
||||||
|
const response = await fetch("/api/v1/recall-logs?limit=1");
|
||||||
|
const data = await response.json();
|
||||||
|
|
||||||
|
// Check if there's a recent recall (created after we started polling)
|
||||||
|
if (data.recallLogs && data.recallLogs.length > 0) {
|
||||||
|
const latestRecall = data.recallLogs[0];
|
||||||
|
const recallTime = new Date(latestRecall.createdAt).getTime();
|
||||||
|
|
||||||
|
// If the recall was created after we started polling
|
||||||
|
if (recallTime >= startTime) {
|
||||||
|
// Return the query as verification result
|
||||||
|
return latestRecall.query || "Recall detected successfully";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 2000));
|
||||||
|
attempts++;
|
||||||
|
|
||||||
|
return poll();
|
||||||
|
};
|
||||||
|
|
||||||
|
const result = await poll();
|
||||||
|
|
||||||
|
if (result) {
|
||||||
|
setVerificationResult(result);
|
||||||
|
setIsCheckingRecall(false);
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
setError(err instanceof Error ? err.message : "Unknown error occurred");
|
||||||
|
setIsCheckingRecall(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const getStepTitle = () => {
|
||||||
|
switch (currentStep) {
|
||||||
|
case OnboardingStep.PROVIDER_SELECTION:
|
||||||
|
return "Step 1 of 3";
|
||||||
|
case OnboardingStep.FIRST_INGESTION:
|
||||||
|
return "Step 2 of 3";
|
||||||
|
case OnboardingStep.VERIFICATION:
|
||||||
|
return "Step 3 of 3";
|
||||||
|
default:
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Dialog open={isOpen} onOpenChange={onClose}>
|
||||||
|
<DialogContent className="max-h-[90vh] max-w-3xl overflow-y-auto p-4">
|
||||||
|
<DialogHeader>
|
||||||
|
<div className="space-y-3">
|
||||||
|
<DialogTitle className="text-2xl">Welcome to Core</DialogTitle>
|
||||||
|
<div className="space-y-2">
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<p className="text-muted-foreground text-sm">
|
||||||
|
{getStepTitle()}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<Progress
|
||||||
|
segments={[{ value: getProgress() }]}
|
||||||
|
className="mb-2"
|
||||||
|
color="#c15e50"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</DialogHeader>
|
||||||
|
|
||||||
|
<div>
|
||||||
|
{currentStep === OnboardingStep.PROVIDER_SELECTION && (
|
||||||
|
<ProviderSelectionStep
|
||||||
|
selectedProvider={selectedProvider}
|
||||||
|
onSelectProvider={handleProviderSelect}
|
||||||
|
onContinue={handleContinueFromProvider}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{currentStep === OnboardingStep.FIRST_INGESTION &&
|
||||||
|
selectedProvider && (
|
||||||
|
<IngestionStep
|
||||||
|
providerName={PROVIDER_CONFIGS[selectedProvider].name}
|
||||||
|
ingestionStatus={ingestionStatus}
|
||||||
|
onStartWaiting={handleStartWaiting}
|
||||||
|
error={error}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{currentStep === OnboardingStep.VERIFICATION && selectedProvider && (
|
||||||
|
<VerificationStep
|
||||||
|
providerName={PROVIDER_CONFIGS[selectedProvider].name}
|
||||||
|
verificationResult={verificationResult}
|
||||||
|
isCheckingRecall={isCheckingRecall}
|
||||||
|
onStartChecking={pollRecallLogs}
|
||||||
|
onComplete={handleComplete}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</DialogContent>
|
||||||
|
</Dialog>
|
||||||
|
);
|
||||||
|
}
|
||||||
@ -15,6 +15,7 @@ interface OnboardingQuestionProps {
|
|||||||
isLast: boolean;
|
isLast: boolean;
|
||||||
currentStep: number;
|
currentStep: number;
|
||||||
totalSteps: number;
|
totalSteps: number;
|
||||||
|
loading?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
export default function OnboardingQuestionComponent({
|
export default function OnboardingQuestionComponent({
|
||||||
@ -27,6 +28,7 @@ export default function OnboardingQuestionComponent({
|
|||||||
isLast,
|
isLast,
|
||||||
currentStep,
|
currentStep,
|
||||||
totalSteps,
|
totalSteps,
|
||||||
|
loading,
|
||||||
}: OnboardingQuestionProps) {
|
}: OnboardingQuestionProps) {
|
||||||
const [selectedValue, setSelectedValue] = useState<string | string[]>(
|
const [selectedValue, setSelectedValue] = useState<string | string[]>(
|
||||||
answer || (question.type === "multi-select" ? [] : ""),
|
answer || (question.type === "multi-select" ? [] : ""),
|
||||||
@ -137,6 +139,7 @@ export default function OnboardingQuestionComponent({
|
|||||||
variant="ghost"
|
variant="ghost"
|
||||||
size="xl"
|
size="xl"
|
||||||
onClick={onPrevious}
|
onClick={onPrevious}
|
||||||
|
disabled={loading}
|
||||||
className="rounded-lg px-4 py-2"
|
className="rounded-lg px-4 py-2"
|
||||||
>
|
>
|
||||||
Previous
|
Previous
|
||||||
@ -148,7 +151,8 @@ export default function OnboardingQuestionComponent({
|
|||||||
variant="secondary"
|
variant="secondary"
|
||||||
size="xl"
|
size="xl"
|
||||||
onClick={onNext}
|
onClick={onNext}
|
||||||
disabled={!isValid()}
|
isLoading={!!loading}
|
||||||
|
disabled={!isValid() || loading}
|
||||||
className="rounded-lg px-4 py-2"
|
className="rounded-lg px-4 py-2"
|
||||||
>
|
>
|
||||||
{isLast ? "Complete Profile" : "Continue"}
|
{isLast ? "Complete Profile" : "Continue"}
|
||||||
|
|||||||
54
apps/webapp/app/components/onboarding/provider-config.ts
Normal file
54
apps/webapp/app/components/onboarding/provider-config.ts
Normal file
@ -0,0 +1,54 @@
|
|||||||
|
import { Provider, type ProviderConfig } from "./types";
|
||||||
|
|
||||||
|
export const PROVIDER_CONFIGS: Record<Provider, ProviderConfig> = {
|
||||||
|
[Provider.CLAUDE_CODE]: {
|
||||||
|
id: Provider.CLAUDE_CODE,
|
||||||
|
name: "Claude Code CLI",
|
||||||
|
description: "Connect your Claude Code CLI to CORE's memory system",
|
||||||
|
docsUrl: "https://docs.heysol.ai/providers/claude-code",
|
||||||
|
icon: "claude",
|
||||||
|
},
|
||||||
|
[Provider.CLAUDE]: {
|
||||||
|
id: Provider.CLAUDE,
|
||||||
|
name: "Claude",
|
||||||
|
description: "Connect your Claude Desktop app to CORE's memory system",
|
||||||
|
docsUrl: "https://docs.heysol.ai/providers/claude",
|
||||||
|
icon: "claude",
|
||||||
|
},
|
||||||
|
[Provider.CURSOR]: {
|
||||||
|
id: Provider.CURSOR,
|
||||||
|
name: "Cursor",
|
||||||
|
description: "Connect your Cursor Desktop app to CORE's memory system",
|
||||||
|
docsUrl: "https://docs.heysol.ai/providers/cursor",
|
||||||
|
icon: "cursor",
|
||||||
|
},
|
||||||
|
[Provider.KILO_CODE]: {
|
||||||
|
id: Provider.KILO_CODE,
|
||||||
|
name: "Kilo-Code",
|
||||||
|
description: "Connect Kilo Code Agent to CORE's memory system via MCP",
|
||||||
|
docsUrl: "https://docs.heysol.ai/providers/kilo-code",
|
||||||
|
icon: "kilo-code",
|
||||||
|
},
|
||||||
|
[Provider.VSCODE]: {
|
||||||
|
id: Provider.VSCODE,
|
||||||
|
name: "VS Code (Github Copilot)",
|
||||||
|
description: "Connect your VS Code editor to CORE's memory system via MCP",
|
||||||
|
docsUrl: "https://docs.heysol.ai/providers/vscode",
|
||||||
|
icon: "vscode",
|
||||||
|
},
|
||||||
|
[Provider.ZED]: {
|
||||||
|
id: Provider.ZED,
|
||||||
|
name: "Zed",
|
||||||
|
description: "Connect your Zed editor to CORE's memory system via MCP",
|
||||||
|
docsUrl: "https://docs.heysol.ai/providers/zed",
|
||||||
|
icon: "zed",
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
export const SUGGESTED_INGESTION_PROMPTS = [
|
||||||
|
"I'm a full-stack developer working on a React and Node.js application. I prefer TypeScript, functional programming patterns, and writing comprehensive tests.",
|
||||||
|
"I'm working on a machine learning project using Python and PyTorch. I focus on computer vision and prefer Jupyter notebooks for exploration.",
|
||||||
|
"I'm a DevOps engineer managing Kubernetes clusters. I work primarily with Terraform, Helm, and CI/CD pipelines using GitHub Actions.",
|
||||||
|
];
|
||||||
|
|
||||||
|
export const VERIFICATION_PROMPT = "Who am I? Tell me what you know about me.";
|
||||||
@ -0,0 +1,89 @@
|
|||||||
|
import { Check, ExternalLink } from "lucide-react";
|
||||||
|
import { Button } from "../ui";
|
||||||
|
import { PROVIDER_CONFIGS } from "./provider-config";
|
||||||
|
import { type Provider } from "./types";
|
||||||
|
import { getIconForAuthorise } from "../icon-utils";
|
||||||
|
|
||||||
|
interface ProviderSelectionStepProps {
|
||||||
|
selectedProvider?: Provider;
|
||||||
|
onSelectProvider: (provider: Provider) => void;
|
||||||
|
onContinue: () => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function ProviderSelectionStep({
|
||||||
|
selectedProvider,
|
||||||
|
onSelectProvider,
|
||||||
|
onContinue,
|
||||||
|
}: ProviderSelectionStepProps) {
|
||||||
|
const providers = Object.values(PROVIDER_CONFIGS);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-2">
|
||||||
|
<div>
|
||||||
|
<h2 className="mb-2 text-xl font-semibold">Choose Your Provider</h2>
|
||||||
|
<p className="text-muted-foreground text-sm">
|
||||||
|
Select the application you'll use to connect with Core
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="grid grid-cols-1 gap-3 sm:grid-cols-2 lg:grid-cols-3">
|
||||||
|
{providers.map((provider) => {
|
||||||
|
const isSelected = selectedProvider === provider.id;
|
||||||
|
return (
|
||||||
|
<Button
|
||||||
|
key={provider.id}
|
||||||
|
variant="outline"
|
||||||
|
onClick={() => onSelectProvider(provider.id)}
|
||||||
|
size="2xl"
|
||||||
|
className={`relative flex flex-col items-start justify-center gap-1 rounded-lg border-1 border-gray-300 p-4 text-left transition-all ${
|
||||||
|
isSelected
|
||||||
|
? "border-primary bg-primary/5"
|
||||||
|
: "hover:border-primary/50 border-gray-300"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
<div className="flex h-full items-center gap-2">
|
||||||
|
{getIconForAuthorise(provider.icon, 20)}
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<h3 className="font-medium">{provider.name}</h3>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</Button>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{selectedProvider && (
|
||||||
|
<div className="bg-grayAlpha-100 space-y-4 rounded-lg p-4">
|
||||||
|
<div className="space-y-3">
|
||||||
|
<h3 className="font-medium">Next Steps</h3>
|
||||||
|
<p className="text-muted-foreground text-sm">
|
||||||
|
Follow our setup guide to connect{" "}
|
||||||
|
{PROVIDER_CONFIGS[selectedProvider].name} with Core. Once you've
|
||||||
|
completed the setup, come back here to continue.
|
||||||
|
</p>
|
||||||
|
<a
|
||||||
|
href={PROVIDER_CONFIGS[selectedProvider].docsUrl}
|
||||||
|
target="_blank"
|
||||||
|
rel="noopener noreferrer"
|
||||||
|
className="bg-primary text-primary-foreground hover:bg-primary/90 inline-flex items-center gap-2 rounded-md px-4 py-2 text-sm font-medium transition-colors"
|
||||||
|
>
|
||||||
|
Open Setup Guide
|
||||||
|
<ExternalLink className="h-4 w-4" />
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<div className="flex justify-end">
|
||||||
|
<Button
|
||||||
|
onClick={onContinue}
|
||||||
|
disabled={!selectedProvider}
|
||||||
|
size="lg"
|
||||||
|
variant="secondary"
|
||||||
|
>
|
||||||
|
Continue to Setup
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
32
apps/webapp/app/components/onboarding/types.ts
Normal file
32
apps/webapp/app/components/onboarding/types.ts
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
export enum Provider {
|
||||||
|
CLAUDE_CODE = "claude-code",
|
||||||
|
CLAUDE = "claude",
|
||||||
|
CURSOR = "cursor",
|
||||||
|
KILO_CODE = "kilo-code",
|
||||||
|
VSCODE = "vscode",
|
||||||
|
ZED = "zed",
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum OnboardingStep {
|
||||||
|
PROVIDER_SELECTION = "provider_selection",
|
||||||
|
FIRST_INGESTION = "first_ingestion",
|
||||||
|
VERIFICATION = "verification",
|
||||||
|
COMPLETE = "complete",
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ProviderConfig {
|
||||||
|
id: Provider;
|
||||||
|
name: string;
|
||||||
|
description: string;
|
||||||
|
docsUrl: string;
|
||||||
|
icon: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface OnboardingState {
|
||||||
|
currentStep: OnboardingStep;
|
||||||
|
selectedProvider?: Provider;
|
||||||
|
isConnected: boolean;
|
||||||
|
ingestionStatus: "idle" | "waiting" | "processing" | "complete" | "error";
|
||||||
|
verificationResult?: string;
|
||||||
|
error?: string;
|
||||||
|
}
|
||||||
101
apps/webapp/app/components/onboarding/verification-step.tsx
Normal file
101
apps/webapp/app/components/onboarding/verification-step.tsx
Normal file
@ -0,0 +1,101 @@
|
|||||||
|
import { useState } from "react";
|
||||||
|
import {
|
||||||
|
Copy,
|
||||||
|
Check,
|
||||||
|
AlertCircle,
|
||||||
|
ThumbsUp,
|
||||||
|
ThumbsDown,
|
||||||
|
Loader2,
|
||||||
|
} from "lucide-react";
|
||||||
|
import { Button } from "../ui";
|
||||||
|
import { VERIFICATION_PROMPT } from "./provider-config";
|
||||||
|
|
||||||
|
interface VerificationStepProps {
|
||||||
|
providerName: string;
|
||||||
|
verificationResult?: string;
|
||||||
|
isCheckingRecall?: boolean;
|
||||||
|
onStartChecking: () => void;
|
||||||
|
onComplete: () => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function VerificationStep({
|
||||||
|
providerName,
|
||||||
|
verificationResult,
|
||||||
|
isCheckingRecall = false,
|
||||||
|
onStartChecking,
|
||||||
|
onComplete,
|
||||||
|
}: VerificationStepProps) {
|
||||||
|
const [copied, setCopied] = useState(false);
|
||||||
|
|
||||||
|
const handleCopy = async () => {
|
||||||
|
await navigator.clipboard.writeText(VERIFICATION_PROMPT);
|
||||||
|
setCopied(true);
|
||||||
|
setTimeout(() => setCopied(false), 2000);
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
<div>
|
||||||
|
<h2 className="mb-2 text-xl font-semibold">Verify Your Memory</h2>
|
||||||
|
<p className="text-muted-foreground text-sm">
|
||||||
|
Let's test if your memory is working correctly by asking the AI about
|
||||||
|
you
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{!verificationResult && !isCheckingRecall && (
|
||||||
|
<>
|
||||||
|
<div className="group bg-grayAlpha-100 relative rounded-lg border border-gray-300 p-4">
|
||||||
|
<p className="mb-1 text-sm font-medium">Copy this prompt:</p>
|
||||||
|
<p className="pr-10 text-sm">{VERIFICATION_PROMPT}</p>
|
||||||
|
<button
|
||||||
|
onClick={handleCopy}
|
||||||
|
className="hover:bg-background absolute top-3 right-3 rounded-md p-2 transition-colors"
|
||||||
|
title="Copy to clipboard"
|
||||||
|
>
|
||||||
|
{copied ? (
|
||||||
|
<Check className="h-4 w-4 text-green-500" />
|
||||||
|
) : (
|
||||||
|
<Copy className="text-muted-foreground h-4 w-4" />
|
||||||
|
)}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="flex items-center gap-3 rounded-lg border border-blue-500/20 bg-blue-500/10 p-4">
|
||||||
|
<AlertCircle className="h-5 w-5 shrink-0 text-blue-500" />
|
||||||
|
<div className="flex-1 text-sm">
|
||||||
|
<p className="text-blue-600 dark:text-blue-400">
|
||||||
|
Paste this prompt in {providerName}. Once you ask, click the
|
||||||
|
button below to detect the recall.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="flex justify-end gap-3">
|
||||||
|
<Button onClick={onComplete} variant="ghost" size="lg">
|
||||||
|
Skip Verification
|
||||||
|
</Button>
|
||||||
|
<Button onClick={onStartChecking} size="lg" variant="secondary">
|
||||||
|
I've Asked the Question
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{isCheckingRecall && !verificationResult && (
|
||||||
|
<div className="flex flex-col items-center justify-center space-y-4 py-12">
|
||||||
|
<Loader2 className="text-primary h-12 w-12 animate-spin" />
|
||||||
|
<div className="space-y-2 text-center">
|
||||||
|
<h3 className="text-lg font-medium">
|
||||||
|
Waiting for your recall query...
|
||||||
|
</h3>
|
||||||
|
<p className="text-muted-foreground max-w-md text-sm">
|
||||||
|
Make sure you've asked "{VERIFICATION_PROMPT}" in {providerName}.
|
||||||
|
We're listening for the recall.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
@ -1,4 +1,5 @@
|
|||||||
import * as React from "react";
|
import * as React from "react";
|
||||||
|
import { useHotkeys } from "react-hotkeys-hook";
|
||||||
|
|
||||||
import {
|
import {
|
||||||
Sidebar,
|
Sidebar,
|
||||||
@ -12,14 +13,20 @@ import {
|
|||||||
Columns3,
|
Columns3,
|
||||||
Inbox,
|
Inbox,
|
||||||
LayoutGrid,
|
LayoutGrid,
|
||||||
|
LoaderCircle,
|
||||||
MessageSquare,
|
MessageSquare,
|
||||||
Network,
|
Network,
|
||||||
|
Plus,
|
||||||
} from "lucide-react";
|
} from "lucide-react";
|
||||||
import { NavMain } from "./nav-main";
|
import { NavMain } from "./nav-main";
|
||||||
import { useUser } from "~/hooks/useUser";
|
import { useUser } from "~/hooks/useUser";
|
||||||
import { NavUser } from "./nav-user";
|
import { NavUser } from "./nav-user";
|
||||||
import Logo from "../logo/logo";
|
import Logo from "../logo/logo";
|
||||||
import { ConversationList } from "../conversation";
|
import { ConversationList } from "../conversation";
|
||||||
|
import { Button } from "../ui";
|
||||||
|
import { Project } from "../icons/project";
|
||||||
|
import { AddMemoryCommand } from "../command-bar/add-memory-command";
|
||||||
|
import { AddMemoryDialog } from "../command-bar/memory-dialog.client";
|
||||||
|
|
||||||
const data = {
|
const data = {
|
||||||
navMain: [
|
navMain: [
|
||||||
@ -41,7 +48,7 @@ const data = {
|
|||||||
{
|
{
|
||||||
title: "Spaces",
|
title: "Spaces",
|
||||||
url: "/home/space",
|
url: "/home/space",
|
||||||
icon: Columns3,
|
icon: Project,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
title: "Integrations",
|
title: "Integrations",
|
||||||
@ -54,33 +61,57 @@ const data = {
|
|||||||
export function AppSidebar({ ...props }: React.ComponentProps<typeof Sidebar>) {
|
export function AppSidebar({ ...props }: React.ComponentProps<typeof Sidebar>) {
|
||||||
const user = useUser();
|
const user = useUser();
|
||||||
|
|
||||||
return (
|
const [showAddMemory, setShowAddMemory] = React.useState(false);
|
||||||
<Sidebar
|
|
||||||
variant="inset"
|
|
||||||
{...props}
|
|
||||||
className="bg-background h-[100vh] py-2"
|
|
||||||
>
|
|
||||||
<SidebarHeader>
|
|
||||||
<SidebarMenu>
|
|
||||||
<SidebarMenuItem>
|
|
||||||
<div className="mt-1 ml-1 flex w-full items-center justify-start gap-2">
|
|
||||||
<Logo size={20} />
|
|
||||||
C.O.R.E.
|
|
||||||
</div>
|
|
||||||
</SidebarMenuItem>
|
|
||||||
</SidebarMenu>
|
|
||||||
</SidebarHeader>
|
|
||||||
<SidebarContent>
|
|
||||||
<NavMain items={data.navMain} />
|
|
||||||
<div className="mt-4 flex h-full flex-col">
|
|
||||||
<h2 className="text-muted-foreground px-4 text-sm"> History </h2>
|
|
||||||
<ConversationList />
|
|
||||||
</div>
|
|
||||||
</SidebarContent>
|
|
||||||
|
|
||||||
<SidebarFooter className="px-2">
|
// Open command bar with Meta+K (Cmd+K on Mac, Ctrl+K on Windows/Linux)
|
||||||
<NavUser user={user} />
|
useHotkeys("meta+k", (e) => {
|
||||||
</SidebarFooter>
|
e.preventDefault();
|
||||||
</Sidebar>
|
setShowAddMemory(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
<Sidebar
|
||||||
|
variant="inset"
|
||||||
|
{...props}
|
||||||
|
className="bg-background h-[100vh] py-2"
|
||||||
|
>
|
||||||
|
<SidebarHeader>
|
||||||
|
<SidebarMenu>
|
||||||
|
<SidebarMenuItem className="flex justify-center">
|
||||||
|
<div className="mt-1 ml-1 flex w-full items-center justify-start gap-2">
|
||||||
|
<Logo size={20} />
|
||||||
|
C.O.R.E.
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<Button
|
||||||
|
variant="secondary"
|
||||||
|
isActive
|
||||||
|
size="sm"
|
||||||
|
className="rounded"
|
||||||
|
onClick={() => setShowAddMemory(true)}
|
||||||
|
>
|
||||||
|
<Plus size={16} />
|
||||||
|
</Button>
|
||||||
|
</SidebarMenuItem>
|
||||||
|
</SidebarMenu>
|
||||||
|
</SidebarHeader>
|
||||||
|
<SidebarContent>
|
||||||
|
<NavMain items={data.navMain} />
|
||||||
|
<div className="mt-4 flex h-full flex-col">
|
||||||
|
<h2 className="text-muted-foreground px-4 text-sm"> History </h2>
|
||||||
|
<ConversationList />
|
||||||
|
</div>
|
||||||
|
</SidebarContent>
|
||||||
|
|
||||||
|
<SidebarFooter className="flex flex-col px-2">
|
||||||
|
<NavUser user={user} />
|
||||||
|
</SidebarFooter>
|
||||||
|
</Sidebar>
|
||||||
|
|
||||||
|
{showAddMemory && (
|
||||||
|
<AddMemoryDialog open={showAddMemory} onOpenChange={setShowAddMemory} />
|
||||||
|
)}
|
||||||
|
</>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@ -53,7 +53,7 @@ export function NavUser({ user }: { user: ExtendedUser }) {
|
|||||||
<DropdownMenuSeparator />
|
<DropdownMenuSeparator />
|
||||||
<DropdownMenuItem
|
<DropdownMenuItem
|
||||||
className="flex gap-2"
|
className="flex gap-2"
|
||||||
onClick={() => navigate("/settings/api")}
|
onClick={() => navigate("/settings/account")}
|
||||||
>
|
>
|
||||||
<Settings size={16} />
|
<Settings size={16} />
|
||||||
Settings
|
Settings
|
||||||
@ -67,6 +67,15 @@ export function NavUser({ user }: { user: ExtendedUser }) {
|
|||||||
</DropdownMenuItem>
|
</DropdownMenuItem>
|
||||||
</DropdownMenuContent>
|
</DropdownMenuContent>
|
||||||
</DropdownMenu>
|
</DropdownMenu>
|
||||||
|
|
||||||
|
<Button
|
||||||
|
variant="ghost"
|
||||||
|
onClick={() => {
|
||||||
|
navigate("/settings/billing");
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<div>{user.availableCredits} credits</div>
|
||||||
|
</Button>
|
||||||
</SidebarMenuItem>
|
</SidebarMenuItem>
|
||||||
</SidebarMenu>
|
</SidebarMenu>
|
||||||
);
|
);
|
||||||
|
|||||||
@ -17,8 +17,8 @@ interface SpaceCardProps {
|
|||||||
createdAt: string;
|
createdAt: string;
|
||||||
updatedAt: string;
|
updatedAt: string;
|
||||||
autoMode: boolean;
|
autoMode: boolean;
|
||||||
statementCount: number | null;
|
|
||||||
summary: string | null;
|
summary: string | null;
|
||||||
|
contextCount?: number | null;
|
||||||
themes?: string[];
|
themes?: string[];
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@ -46,13 +46,17 @@ export function SpaceCard({ space }: SpaceCardProps) {
|
|||||||
</div>
|
</div>
|
||||||
<CardTitle className="text-base">{space.name}</CardTitle>
|
<CardTitle className="text-base">{space.name}</CardTitle>
|
||||||
<CardDescription className="line-clamp-2 text-xs">
|
<CardDescription className="line-clamp-2 text-xs">
|
||||||
{space.description || space.summary || "Knowledge space"}
|
<p
|
||||||
|
dangerouslySetInnerHTML={{
|
||||||
|
__html: space.description || space.summary || "Knowledge space",
|
||||||
|
}}
|
||||||
|
></p>
|
||||||
</CardDescription>
|
</CardDescription>
|
||||||
<div className="text-muted-foreground mt-2 flex items-center justify-between text-xs">
|
<div className="text-muted-foreground mt-2 flex items-center justify-between text-xs">
|
||||||
{space.statementCount && space.statementCount > 0 && (
|
{space.contextCount && space.contextCount > 0 && (
|
||||||
<div>
|
<div>
|
||||||
{space.statementCount} fact
|
{space.contextCount} episode
|
||||||
{space.statementCount !== 1 ? "s" : ""}
|
{space.contextCount !== 1 ? "s" : ""}
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
167
apps/webapp/app/components/spaces/space-dropdown.tsx
Normal file
167
apps/webapp/app/components/spaces/space-dropdown.tsx
Normal file
@ -0,0 +1,167 @@
|
|||||||
|
import { useState, useEffect } from "react";
|
||||||
|
import { Check, Plus, X } from "lucide-react";
|
||||||
|
import { Button } from "~/components/ui/button";
|
||||||
|
import {
|
||||||
|
Popover,
|
||||||
|
PopoverContent,
|
||||||
|
PopoverPortal,
|
||||||
|
PopoverTrigger,
|
||||||
|
} from "~/components/ui/popover";
|
||||||
|
import {
|
||||||
|
Command,
|
||||||
|
CommandEmpty,
|
||||||
|
CommandGroup,
|
||||||
|
CommandInput,
|
||||||
|
CommandItem,
|
||||||
|
CommandList,
|
||||||
|
} from "~/components/ui/command";
|
||||||
|
import { Badge } from "~/components/ui/badge";
|
||||||
|
import { cn } from "~/lib/utils";
|
||||||
|
import { useFetcher } from "@remix-run/react";
|
||||||
|
import { Project } from "../icons/project";
|
||||||
|
|
||||||
|
interface Space {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
description?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface SpaceDropdownProps {
|
||||||
|
episodeIds: string[];
|
||||||
|
selectedSpaceIds?: string[];
|
||||||
|
onSpaceChange?: (spaceIds: string[]) => void;
|
||||||
|
className?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function SpaceDropdown({
|
||||||
|
episodeIds,
|
||||||
|
selectedSpaceIds = [],
|
||||||
|
onSpaceChange,
|
||||||
|
className,
|
||||||
|
}: SpaceDropdownProps) {
|
||||||
|
const [open, setOpen] = useState(false);
|
||||||
|
const [selectedSpaces, setSelectedSpaces] =
|
||||||
|
useState<string[]>(selectedSpaceIds);
|
||||||
|
const [spaces, setSpaces] = useState<Space[]>([]);
|
||||||
|
const spacesFetcher = useFetcher<{ spaces: Space[] }>();
|
||||||
|
const assignFetcher = useFetcher();
|
||||||
|
|
||||||
|
// Fetch all spaces
|
||||||
|
useEffect(() => {
|
||||||
|
spacesFetcher.load("/api/v1/spaces");
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
// Update spaces when data is fetched
|
||||||
|
useEffect(() => {
|
||||||
|
if (spacesFetcher.data?.spaces) {
|
||||||
|
setSpaces(spacesFetcher.data.spaces);
|
||||||
|
}
|
||||||
|
}, [spacesFetcher.data]);
|
||||||
|
|
||||||
|
const handleSpaceToggle = (spaceId: string) => {
|
||||||
|
const newSelectedSpaces = selectedSpaces.includes(spaceId)
|
||||||
|
? selectedSpaces.filter((id) => id !== spaceId)
|
||||||
|
: [...selectedSpaces, spaceId];
|
||||||
|
|
||||||
|
setSelectedSpaces(newSelectedSpaces);
|
||||||
|
if (episodeIds) {
|
||||||
|
assignFetcher.submit(
|
||||||
|
{
|
||||||
|
episodeIds: JSON.stringify(episodeIds),
|
||||||
|
spaceId,
|
||||||
|
action: selectedSpaces.includes(spaceId) ? "remove" : "assign",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
method: "post",
|
||||||
|
action: "/api/v1/episodes/assign-space",
|
||||||
|
encType: "application/json",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Call the callback if provided
|
||||||
|
if (onSpaceChange) {
|
||||||
|
onSpaceChange(newSelectedSpaces);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const selectedSpaceObjects = spaces.filter((space) =>
|
||||||
|
selectedSpaces.includes(space.id),
|
||||||
|
);
|
||||||
|
|
||||||
|
const getTrigger = () => {
|
||||||
|
if (selectedSpaceObjects?.length === 1) {
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
<Project size={14} /> {selectedSpaceObjects[0].name}
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (selectedSpaceObjects?.length > 1) {
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
<Project size={14} /> {selectedSpaceObjects.length} Spaces
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
{" "}
|
||||||
|
<Project size={14} />
|
||||||
|
Spaces
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className={cn("flex flex-wrap items-center gap-2", className)}>
|
||||||
|
{/* + button to add more spaces */}
|
||||||
|
<Popover open={open} onOpenChange={setOpen}>
|
||||||
|
<PopoverTrigger asChild>
|
||||||
|
<Button
|
||||||
|
variant="secondary"
|
||||||
|
size="sm"
|
||||||
|
role="combobox"
|
||||||
|
aria-expanded={open}
|
||||||
|
className="h-7 gap-1 rounded"
|
||||||
|
>
|
||||||
|
{getTrigger()}
|
||||||
|
</Button>
|
||||||
|
</PopoverTrigger>
|
||||||
|
<PopoverPortal>
|
||||||
|
<PopoverContent className="w-[250px] p-0" align="end">
|
||||||
|
<Command>
|
||||||
|
<CommandInput placeholder="Search spaces..." />
|
||||||
|
<CommandList>
|
||||||
|
<CommandEmpty>No spaces found.</CommandEmpty>
|
||||||
|
<CommandGroup>
|
||||||
|
{spaces.map((space) => (
|
||||||
|
<CommandItem
|
||||||
|
key={space.id}
|
||||||
|
value={space.name}
|
||||||
|
onSelect={() => handleSpaceToggle(space.id)}
|
||||||
|
>
|
||||||
|
<Check
|
||||||
|
className={cn(
|
||||||
|
"mr-2 h-4 w-4",
|
||||||
|
selectedSpaces.includes(space.id)
|
||||||
|
? "opacity-100"
|
||||||
|
: "opacity-0",
|
||||||
|
)}
|
||||||
|
/>
|
||||||
|
<div className="flex flex-col">
|
||||||
|
<span className="text-sm">{space.name}</span>
|
||||||
|
</div>
|
||||||
|
</CommandItem>
|
||||||
|
))}
|
||||||
|
</CommandGroup>
|
||||||
|
</CommandList>
|
||||||
|
</Command>
|
||||||
|
</PopoverContent>
|
||||||
|
</PopoverPortal>
|
||||||
|
</Popover>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
112
apps/webapp/app/components/spaces/space-episode-actions.tsx
Normal file
112
apps/webapp/app/components/spaces/space-episode-actions.tsx
Normal file
@ -0,0 +1,112 @@
|
|||||||
|
import { EllipsisVertical, Trash } from "lucide-react";
|
||||||
|
import {
|
||||||
|
DropdownMenu,
|
||||||
|
DropdownMenuContent,
|
||||||
|
DropdownMenuItem,
|
||||||
|
DropdownMenuTrigger,
|
||||||
|
} from "../ui/dropdown-menu";
|
||||||
|
import { Button } from "../ui/button";
|
||||||
|
import {
|
||||||
|
AlertDialog,
|
||||||
|
AlertDialogAction,
|
||||||
|
AlertDialogCancel,
|
||||||
|
AlertDialogContent,
|
||||||
|
AlertDialogDescription,
|
||||||
|
AlertDialogFooter,
|
||||||
|
AlertDialogHeader,
|
||||||
|
AlertDialogTitle,
|
||||||
|
} from "../ui/alert-dialog";
|
||||||
|
import { useEffect, useState } from "react";
|
||||||
|
import { useFetcher, useNavigate } from "@remix-run/react";
|
||||||
|
import { toast } from "~/hooks/use-toast";
|
||||||
|
|
||||||
|
interface SpaceEpisodeActionsProps {
|
||||||
|
episodeId: string;
|
||||||
|
spaceId: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const SpaceEpisodeActions = ({
|
||||||
|
episodeId,
|
||||||
|
spaceId,
|
||||||
|
}: SpaceEpisodeActionsProps) => {
|
||||||
|
const [removeDialogOpen, setRemoveDialogOpen] = useState(false);
|
||||||
|
const removeFetcher = useFetcher();
|
||||||
|
const navigate = useNavigate();
|
||||||
|
|
||||||
|
const handleRemove = () => {
|
||||||
|
removeFetcher.submit(
|
||||||
|
{
|
||||||
|
episodeIds: JSON.stringify([episodeId]),
|
||||||
|
spaceId,
|
||||||
|
action: "remove",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
method: "post",
|
||||||
|
action: "/api/v1/episodes/assign-space",
|
||||||
|
encType: "application/json",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
setRemoveDialogOpen(false);
|
||||||
|
};
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (removeFetcher.state === "idle" && removeFetcher.data) {
|
||||||
|
if (removeFetcher.data.success) {
|
||||||
|
toast({
|
||||||
|
title: "Success",
|
||||||
|
description: "Episode removed from space",
|
||||||
|
});
|
||||||
|
// Reload the page to refresh the episode list
|
||||||
|
navigate(".", { replace: true });
|
||||||
|
} else {
|
||||||
|
toast({
|
||||||
|
title: "Error",
|
||||||
|
description: removeFetcher.data.error || "Failed to remove episode",
|
||||||
|
variant: "destructive",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}, [removeFetcher.state, removeFetcher.data, navigate]);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
<DropdownMenu>
|
||||||
|
<DropdownMenuTrigger asChild>
|
||||||
|
<Button
|
||||||
|
variant="ghost"
|
||||||
|
className="h-6 w-6 shrink-0 items-center justify-center p-0 opacity-0 transition-opacity group-hover:opacity-100"
|
||||||
|
onClick={(e) => e.stopPropagation()}
|
||||||
|
>
|
||||||
|
<EllipsisVertical size={16} />
|
||||||
|
</Button>
|
||||||
|
</DropdownMenuTrigger>
|
||||||
|
|
||||||
|
<DropdownMenuContent align="end" onClick={(e) => e.stopPropagation()}>
|
||||||
|
<DropdownMenuItem onClick={() => setRemoveDialogOpen(true)}>
|
||||||
|
<Button variant="link" size="sm" className="gap-2 rounded">
|
||||||
|
<Trash size={15} /> Remove from space
|
||||||
|
</Button>
|
||||||
|
</DropdownMenuItem>
|
||||||
|
</DropdownMenuContent>
|
||||||
|
</DropdownMenu>
|
||||||
|
|
||||||
|
<AlertDialog open={removeDialogOpen} onOpenChange={setRemoveDialogOpen}>
|
||||||
|
<AlertDialogContent onClick={(e) => e.stopPropagation()}>
|
||||||
|
<AlertDialogHeader>
|
||||||
|
<AlertDialogTitle>Remove from space</AlertDialogTitle>
|
||||||
|
<AlertDialogDescription>
|
||||||
|
Are you sure you want to remove this episode from the space? This
|
||||||
|
will not delete the episode itself.
|
||||||
|
</AlertDialogDescription>
|
||||||
|
</AlertDialogHeader>
|
||||||
|
<AlertDialogFooter>
|
||||||
|
<AlertDialogCancel>Cancel</AlertDialogCancel>
|
||||||
|
<AlertDialogAction onClick={handleRemove}>
|
||||||
|
Remove
|
||||||
|
</AlertDialogAction>
|
||||||
|
</AlertDialogFooter>
|
||||||
|
</AlertDialogContent>
|
||||||
|
</AlertDialog>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
};
|
||||||
@ -2,12 +2,30 @@ import { Calendar } from "lucide-react";
|
|||||||
import { Badge } from "~/components/ui/badge";
|
import { Badge } from "~/components/ui/badge";
|
||||||
import type { StatementNode } from "@core/types";
|
import type { StatementNode } from "@core/types";
|
||||||
import { cn } from "~/lib/utils";
|
import { cn } from "~/lib/utils";
|
||||||
|
import { useNavigate } from "@remix-run/react";
|
||||||
|
import Markdown from "react-markdown";
|
||||||
|
import { StyledMarkdown } from "../common/styled-markdown";
|
||||||
|
import { SpaceEpisodeActions } from "./space-episode-actions";
|
||||||
|
|
||||||
interface SpaceFactCardProps {
|
export interface Episode {
|
||||||
fact: StatementNode;
|
uuid: string;
|
||||||
|
content: string;
|
||||||
|
originalContent: string;
|
||||||
|
source: any;
|
||||||
|
createdAt: Date;
|
||||||
|
validAt: Date;
|
||||||
|
metadata: any;
|
||||||
|
sessionId: any;
|
||||||
|
logId?: any;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function SpaceFactCard({ fact }: SpaceFactCardProps) {
|
interface SpaceFactCardProps {
|
||||||
|
episode: Episode;
|
||||||
|
spaceId: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function SpaceEpisodeCard({ episode, spaceId }: SpaceFactCardProps) {
|
||||||
|
const navigate = useNavigate();
|
||||||
const formatDate = (date: Date | string) => {
|
const formatDate = (date: Date | string) => {
|
||||||
const d = new Date(date);
|
const d = new Date(date);
|
||||||
return d.toLocaleDateString("en-US", {
|
return d.toLocaleDateString("en-US", {
|
||||||
@ -17,18 +35,20 @@ export function SpaceFactCard({ fact }: SpaceFactCardProps) {
|
|||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
const displayText = fact.fact;
|
const displayText = episode.originalContent;
|
||||||
|
|
||||||
const recallCount =
|
const onClick = () => {
|
||||||
(fact.recallCount?.high ?? 0) + (fact.recallCount?.low ?? 0);
|
navigate(`/home/inbox/${episode.logId}`);
|
||||||
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<div className="flex w-full items-center px-5 pr-2">
|
<div className="group flex w-full items-center px-5 pr-2">
|
||||||
<div
|
<div
|
||||||
className={cn(
|
className={cn(
|
||||||
"group-hover:bg-grayAlpha-100 flex min-w-[0px] shrink grow items-start gap-2 rounded-md px-3",
|
"group-hover:bg-grayAlpha-100 flex min-w-[0px] shrink grow cursor-pointer items-start gap-2 rounded-md px-3",
|
||||||
)}
|
)}
|
||||||
|
onClick={onClick}
|
||||||
>
|
>
|
||||||
<div
|
<div
|
||||||
className={cn(
|
className={cn(
|
||||||
@ -37,19 +57,14 @@ export function SpaceFactCard({ fact }: SpaceFactCardProps) {
|
|||||||
>
|
>
|
||||||
<div className="flex w-full items-center justify-between gap-4">
|
<div className="flex w-full items-center justify-between gap-4">
|
||||||
<div className="inline-flex min-h-[24px] min-w-[0px] shrink items-center justify-start">
|
<div className="inline-flex min-h-[24px] min-w-[0px] shrink items-center justify-start">
|
||||||
<div className={cn("truncate text-left")}>{displayText}</div>
|
<StyledMarkdown>{displayText.slice(0, 300)}</StyledMarkdown>
|
||||||
</div>
|
</div>
|
||||||
<div className="text-muted-foreground flex shrink-0 items-center justify-end gap-2 text-xs">
|
<div className="text-muted-foreground flex shrink-0 items-center justify-end gap-2 text-xs">
|
||||||
{!!recallCount && <span>Recalled: {recallCount} times</span>}
|
|
||||||
<Badge variant="secondary" className="rounded text-xs">
|
<Badge variant="secondary" className="rounded text-xs">
|
||||||
<Calendar className="h-3 w-3" />
|
<Calendar className="h-3 w-3" />
|
||||||
{formatDate(fact.validAt)}
|
{formatDate(episode.validAt)}
|
||||||
</Badge>
|
</Badge>
|
||||||
{fact.invalidAt && (
|
<SpaceEpisodeActions episodeId={episode.uuid} spaceId={spaceId} />
|
||||||
<Badge variant="destructive" className="rounded text-xs">
|
|
||||||
Invalid since {formatDate(fact.invalidAt)}
|
|
||||||
</Badge>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@ -9,7 +9,7 @@ import {
|
|||||||
} from "~/components/ui/popover";
|
} from "~/components/ui/popover";
|
||||||
import { Badge } from "~/components/ui/badge";
|
import { Badge } from "~/components/ui/badge";
|
||||||
|
|
||||||
interface SpaceFactsFiltersProps {
|
interface SpaceEpisodesFiltersProps {
|
||||||
selectedValidDate?: string;
|
selectedValidDate?: string;
|
||||||
selectedSpaceFilter?: string;
|
selectedSpaceFilter?: string;
|
||||||
onValidDateChange: (date?: string) => void;
|
onValidDateChange: (date?: string) => void;
|
||||||
@ -22,34 +22,24 @@ const validDateOptions = [
|
|||||||
{ value: "last_6_months", label: "Last 6 Months" },
|
{ value: "last_6_months", label: "Last 6 Months" },
|
||||||
];
|
];
|
||||||
|
|
||||||
const spaceFilterOptions = [
|
type FilterStep = "main" | "validDate";
|
||||||
{ value: "active", label: "Active Facts" },
|
|
||||||
{ value: "archived", label: "Archived Facts" },
|
|
||||||
{ value: "all", label: "All Facts" },
|
|
||||||
];
|
|
||||||
|
|
||||||
type FilterStep = "main" | "validDate" | "spaceFilter";
|
export function SpaceEpisodesFilters({
|
||||||
|
|
||||||
export function SpaceFactsFilters({
|
|
||||||
selectedValidDate,
|
selectedValidDate,
|
||||||
selectedSpaceFilter,
|
selectedSpaceFilter,
|
||||||
onValidDateChange,
|
onValidDateChange,
|
||||||
onSpaceFilterChange,
|
}: SpaceEpisodesFiltersProps) {
|
||||||
}: SpaceFactsFiltersProps) {
|
|
||||||
const [popoverOpen, setPopoverOpen] = useState(false);
|
const [popoverOpen, setPopoverOpen] = useState(false);
|
||||||
const [step, setStep] = useState<FilterStep>("main");
|
const [step, setStep] = useState<FilterStep>("main");
|
||||||
|
|
||||||
const selectedValidDateLabel = validDateOptions.find(
|
const selectedValidDateLabel = validDateOptions.find(
|
||||||
(d) => d.value === selectedValidDate,
|
(d) => d.value === selectedValidDate,
|
||||||
)?.label;
|
)?.label;
|
||||||
const selectedSpaceFilterLabel = spaceFilterOptions.find(
|
|
||||||
(f) => f.value === selectedSpaceFilter,
|
|
||||||
)?.label;
|
|
||||||
|
|
||||||
const hasFilters = selectedValidDate || selectedSpaceFilter;
|
const hasFilters = selectedValidDate || selectedSpaceFilter;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="mb-2 flex w-full items-center justify-start gap-2 px-5">
|
<>
|
||||||
<Popover
|
<Popover
|
||||||
open={popoverOpen}
|
open={popoverOpen}
|
||||||
onOpenChange={(open) => {
|
onOpenChange={(open) => {
|
||||||
@ -79,13 +69,6 @@ export function SpaceFactsFilters({
|
|||||||
>
|
>
|
||||||
Valid Date
|
Valid Date
|
||||||
</Button>
|
</Button>
|
||||||
<Button
|
|
||||||
variant="ghost"
|
|
||||||
className="justify-start"
|
|
||||||
onClick={() => setStep("spaceFilter")}
|
|
||||||
>
|
|
||||||
Status
|
|
||||||
</Button>
|
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
@ -122,40 +105,6 @@ export function SpaceFactsFilters({
|
|||||||
))}
|
))}
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{step === "spaceFilter" && (
|
|
||||||
<div className="flex flex-col gap-1 p-2">
|
|
||||||
<Button
|
|
||||||
variant="ghost"
|
|
||||||
className="w-full justify-start"
|
|
||||||
onClick={() => {
|
|
||||||
onSpaceFilterChange(undefined);
|
|
||||||
setPopoverOpen(false);
|
|
||||||
setStep("main");
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
All Facts
|
|
||||||
</Button>
|
|
||||||
{spaceFilterOptions.map((option) => (
|
|
||||||
<Button
|
|
||||||
key={option.value}
|
|
||||||
variant="ghost"
|
|
||||||
className="w-full justify-start"
|
|
||||||
onClick={() => {
|
|
||||||
onSpaceFilterChange(
|
|
||||||
option.value === selectedSpaceFilter
|
|
||||||
? undefined
|
|
||||||
: option.value,
|
|
||||||
);
|
|
||||||
setPopoverOpen(false);
|
|
||||||
setStep("main");
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
{option.label}
|
|
||||||
</Button>
|
|
||||||
))}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</PopoverContent>
|
</PopoverContent>
|
||||||
</PopoverPortal>
|
</PopoverPortal>
|
||||||
</Popover>
|
</Popover>
|
||||||
@ -172,17 +121,8 @@ export function SpaceFactsFilters({
|
|||||||
/>
|
/>
|
||||||
</Badge>
|
</Badge>
|
||||||
)}
|
)}
|
||||||
{selectedSpaceFilter && (
|
|
||||||
<Badge variant="secondary" className="h-7 gap-1 rounded px-2">
|
|
||||||
{selectedSpaceFilterLabel}
|
|
||||||
<X
|
|
||||||
className="hover:text-destructive h-3.5 w-3.5 cursor-pointer"
|
|
||||||
onClick={() => onSpaceFilterChange(undefined)}
|
|
||||||
/>
|
|
||||||
</Badge>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@ -9,25 +9,26 @@ import {
|
|||||||
} from "react-virtualized";
|
} from "react-virtualized";
|
||||||
import { Database } from "lucide-react";
|
import { Database } from "lucide-react";
|
||||||
import { Card, CardContent } from "~/components/ui/card";
|
import { Card, CardContent } from "~/components/ui/card";
|
||||||
import type { StatementNode } from "@core/types";
|
|
||||||
import { ScrollManagedList } from "../virtualized-list";
|
import { ScrollManagedList } from "../virtualized-list";
|
||||||
import { SpaceFactCard } from "./space-fact-card";
|
import { type Episode, SpaceEpisodeCard } from "./space-episode-card";
|
||||||
|
|
||||||
interface SpaceFactsListProps {
|
interface SpaceEpisodesListProps {
|
||||||
facts: any[];
|
episodes: any[];
|
||||||
hasMore: boolean;
|
hasMore: boolean;
|
||||||
loadMore: () => void;
|
loadMore: () => void;
|
||||||
isLoading: boolean;
|
isLoading: boolean;
|
||||||
height?: number;
|
height?: number;
|
||||||
|
spaceId: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
function FactItemRenderer(
|
function EpisodeItemRenderer(
|
||||||
props: ListRowProps,
|
props: ListRowProps,
|
||||||
facts: StatementNode[],
|
episodes: Episode[],
|
||||||
cache: CellMeasurerCache,
|
cache: CellMeasurerCache,
|
||||||
|
spaceId: string,
|
||||||
) {
|
) {
|
||||||
const { index, key, style, parent } = props;
|
const { index, key, style, parent } = props;
|
||||||
const fact = facts[index];
|
const episode = episodes[index];
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<CellMeasurer
|
<CellMeasurer
|
||||||
@ -38,23 +39,24 @@ function FactItemRenderer(
|
|||||||
rowIndex={index}
|
rowIndex={index}
|
||||||
>
|
>
|
||||||
<div key={key} style={style} className="pb-2">
|
<div key={key} style={style} className="pb-2">
|
||||||
<SpaceFactCard fact={fact} />
|
<SpaceEpisodeCard episode={episode} spaceId={spaceId} />
|
||||||
</div>
|
</div>
|
||||||
</CellMeasurer>
|
</CellMeasurer>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function SpaceFactsList({
|
export function SpaceEpisodesList({
|
||||||
facts,
|
episodes,
|
||||||
hasMore,
|
hasMore,
|
||||||
loadMore,
|
loadMore,
|
||||||
isLoading,
|
isLoading,
|
||||||
}: SpaceFactsListProps) {
|
spaceId,
|
||||||
|
}: SpaceEpisodesListProps) {
|
||||||
// Create a CellMeasurerCache instance using useRef to prevent recreation
|
// Create a CellMeasurerCache instance using useRef to prevent recreation
|
||||||
const cacheRef = useRef<CellMeasurerCache | null>(null);
|
const cacheRef = useRef<CellMeasurerCache | null>(null);
|
||||||
if (!cacheRef.current) {
|
if (!cacheRef.current) {
|
||||||
cacheRef.current = new CellMeasurerCache({
|
cacheRef.current = new CellMeasurerCache({
|
||||||
defaultHeight: 200, // Default row height for fact cards
|
defaultHeight: 200, // Default row height for episode cards
|
||||||
fixedWidth: true, // Rows have fixed width but dynamic height
|
fixedWidth: true, // Rows have fixed width but dynamic height
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@ -62,17 +64,17 @@ export function SpaceFactsList({
|
|||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
cache.clearAll();
|
cache.clearAll();
|
||||||
}, [facts, cache]);
|
}, [episodes, cache]);
|
||||||
|
|
||||||
if (facts.length === 0 && !isLoading) {
|
if (episodes.length === 0 && !isLoading) {
|
||||||
return (
|
return (
|
||||||
<Card className="bg-background-2 w-full">
|
<Card className="bg-background-2 w-full">
|
||||||
<CardContent className="bg-background-2 flex w-full items-center justify-center py-16">
|
<CardContent className="bg-background-2 flex w-full items-center justify-center py-16">
|
||||||
<div className="text-center">
|
<div className="text-center">
|
||||||
<Database className="text-muted-foreground mx-auto mb-4 h-12 w-12" />
|
<Database className="text-muted-foreground mx-auto mb-4 h-12 w-12" />
|
||||||
<h3 className="mb-2 text-lg font-semibold">No facts found</h3>
|
<h3 className="mb-2 text-lg font-semibold">No Episodes found</h3>
|
||||||
<p className="text-muted-foreground">
|
<p className="text-muted-foreground">
|
||||||
This space doesn't contain any facts yet.
|
This space doesn't contain any episodes yet.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
</CardContent>
|
</CardContent>
|
||||||
@ -81,7 +83,7 @@ export function SpaceFactsList({
|
|||||||
}
|
}
|
||||||
|
|
||||||
const isRowLoaded = ({ index }: { index: number }) => {
|
const isRowLoaded = ({ index }: { index: number }) => {
|
||||||
return !!facts[index];
|
return !!episodes[index];
|
||||||
};
|
};
|
||||||
|
|
||||||
const loadMoreRows = async () => {
|
const loadMoreRows = async () => {
|
||||||
@ -92,14 +94,14 @@ export function SpaceFactsList({
|
|||||||
};
|
};
|
||||||
|
|
||||||
const rowRenderer = (props: ListRowProps) => {
|
const rowRenderer = (props: ListRowProps) => {
|
||||||
return FactItemRenderer(props, facts, cache);
|
return EpisodeItemRenderer(props, episodes, cache, spaceId);
|
||||||
};
|
};
|
||||||
|
|
||||||
const rowHeight = ({ index }: Index) => {
|
const rowHeight = ({ index }: Index) => {
|
||||||
return cache.getHeight(index, 0);
|
return cache.getHeight(index, 0);
|
||||||
};
|
};
|
||||||
|
|
||||||
const itemCount = hasMore ? facts.length + 1 : facts.length;
|
const itemCount = hasMore ? episodes.length + 1 : episodes.length;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="h-full grow overflow-hidden rounded-lg">
|
<div className="h-full grow overflow-hidden rounded-lg">
|
||||||
@ -131,7 +133,7 @@ export function SpaceFactsList({
|
|||||||
|
|
||||||
{isLoading && (
|
{isLoading && (
|
||||||
<div className="text-muted-foreground p-4 text-center text-sm">
|
<div className="text-muted-foreground p-4 text-center text-sm">
|
||||||
Loading more facts...
|
Loading more episodes...
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
@ -1,4 +1,4 @@
|
|||||||
import { EllipsisVertical, RefreshCcw, Trash, Edit } from "lucide-react";
|
import { EllipsisVertical, RefreshCcw, Trash, Edit, Copy } from "lucide-react";
|
||||||
import {
|
import {
|
||||||
DropdownMenu,
|
DropdownMenu,
|
||||||
DropdownMenuContent,
|
DropdownMenuContent,
|
||||||
@ -19,6 +19,7 @@ import {
|
|||||||
import { useEffect, useState } from "react";
|
import { useEffect, useState } from "react";
|
||||||
import { useFetcher, useNavigate } from "@remix-run/react";
|
import { useFetcher, useNavigate } from "@remix-run/react";
|
||||||
import { EditSpaceDialog } from "./edit-space-dialog.client";
|
import { EditSpaceDialog } from "./edit-space-dialog.client";
|
||||||
|
import { toast } from "~/hooks/use-toast";
|
||||||
|
|
||||||
interface SpaceOptionsProps {
|
interface SpaceOptionsProps {
|
||||||
id: string;
|
id: string;
|
||||||
@ -64,6 +65,23 @@ export const SpaceOptions = ({ id, name, description }: SpaceOptionsProps) => {
|
|||||||
// revalidator.revalidate();
|
// revalidator.revalidate();
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const handleCopy = async () => {
|
||||||
|
try {
|
||||||
|
await navigator.clipboard.writeText(id);
|
||||||
|
toast({
|
||||||
|
title: "Copied",
|
||||||
|
description: "Space ID copied to clipboard",
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Failed to copy:", err);
|
||||||
|
toast({
|
||||||
|
title: "Error",
|
||||||
|
description: "Failed to copy ID",
|
||||||
|
variant: "destructive",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<DropdownMenu>
|
<DropdownMenu>
|
||||||
@ -79,6 +97,11 @@ export const SpaceOptions = ({ id, name, description }: SpaceOptionsProps) => {
|
|||||||
</DropdownMenuTrigger>
|
</DropdownMenuTrigger>
|
||||||
|
|
||||||
<DropdownMenuContent align="end">
|
<DropdownMenuContent align="end">
|
||||||
|
<DropdownMenuItem onClick={handleCopy}>
|
||||||
|
<Button variant="link" size="sm" className="gap-2 rounded">
|
||||||
|
<Copy size={15} /> Copy Id
|
||||||
|
</Button>
|
||||||
|
</DropdownMenuItem>
|
||||||
<DropdownMenuItem onClick={() => setEditDialogOpen(true)}>
|
<DropdownMenuItem onClick={() => setEditDialogOpen(true)}>
|
||||||
<Button variant="link" size="sm" className="gap-2 rounded">
|
<Button variant="link" size="sm" className="gap-2 rounded">
|
||||||
<Edit size={15} /> Edit
|
<Edit size={15} /> Edit
|
||||||
|
|||||||
@ -9,8 +9,8 @@ interface SpacesGridProps {
|
|||||||
createdAt: string;
|
createdAt: string;
|
||||||
updatedAt: string;
|
updatedAt: string;
|
||||||
autoMode: boolean;
|
autoMode: boolean;
|
||||||
statementCount: number | null;
|
|
||||||
summary: string | null;
|
summary: string | null;
|
||||||
|
contextCount?: number | null;
|
||||||
themes?: string[];
|
themes?: string[];
|
||||||
}>;
|
}>;
|
||||||
}
|
}
|
||||||
|
|||||||
@ -40,7 +40,7 @@ const CommandDialog = ({
|
|||||||
<Dialog {...props}>
|
<Dialog {...props}>
|
||||||
<DialogContent className={cn("overflow-hidden p-0 font-sans")}>
|
<DialogContent className={cn("overflow-hidden p-0 font-sans")}>
|
||||||
<Command
|
<Command
|
||||||
className="[&_[cmdk-group-heading]]:text-muted-foreground [&_[cmdk-group-heading]]:font-medium [&_[cmdk-group]:not([hidden])_~[cmdk-group]]:pt-0 [&_[cmdk-input-wrapper]_svg]:h-5 [&_[cmdk-input-wrapper]_svg]:w-5 [&_[cmdk-input]]:h-12 [&_[cmdk-item]]:px-2 [&_[cmdk-item]]:py-3 [&_[cmdk-item]_svg]:h-5 [&_[cmdk-item]_svg]:w-5"
|
className="[&_[cmdk-group-heading]]:text-muted-foreground [&_[cmdk-group-heading]]:font-medium [&_[cmdk-group]:not([hidden])_~[cmdk-group]]:pt-0 [&_[cmdk-input-wrapper]_svg]:h-5 [&_[cmdk-input-wrapper]_svg]:w-5 [&_[cmdk-input]]:h-10 [&_[cmdk-item]]:px-2 [&_[cmdk-item]]:py-2 [&_[cmdk-item]_svg]:h-5 [&_[cmdk-item]_svg]:w-5"
|
||||||
{...commandProps}
|
{...commandProps}
|
||||||
>
|
>
|
||||||
{children}
|
{children}
|
||||||
@ -141,7 +141,7 @@ const CommandItem = React.forwardRef<
|
|||||||
<CommandPrimitive.Item
|
<CommandPrimitive.Item
|
||||||
ref={ref}
|
ref={ref}
|
||||||
className={cn(
|
className={cn(
|
||||||
"command-item aria-selected:bg-accent aria-selected:text-accent-foreground relative flex cursor-default items-center rounded-sm px-2 py-1 outline-none select-none data-[disabled]:pointer-events-none data-[disabled]:opacity-50",
|
"command-item aria-selected:bg-accent aria-selected:text-accent-foreground relative flex cursor-default items-center rounded px-2 py-1 outline-none select-none data-[disabled]:pointer-events-none data-[disabled]:opacity-50",
|
||||||
className,
|
className,
|
||||||
)}
|
)}
|
||||||
{...props}
|
{...props}
|
||||||
|
|||||||
@ -2,3 +2,5 @@ export * from "./button";
|
|||||||
export * from "./tabs";
|
export * from "./tabs";
|
||||||
export * from "./input";
|
export * from "./input";
|
||||||
export * from "./scrollarea";
|
export * from "./scrollarea";
|
||||||
|
export * from "./toast";
|
||||||
|
export * from "./toaster";
|
||||||
|
|||||||
52
apps/webapp/app/components/ui/progress.tsx
Normal file
52
apps/webapp/app/components/ui/progress.tsx
Normal file
@ -0,0 +1,52 @@
|
|||||||
|
import * as ProgressPrimitive from "@radix-ui/react-progress";
|
||||||
|
|
||||||
|
import * as React from "react";
|
||||||
|
import { cn } from "~/lib/utils";
|
||||||
|
|
||||||
|
interface ProgressSegment {
|
||||||
|
value: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
type Props = React.ComponentPropsWithoutRef<typeof ProgressPrimitive.Root> & {
|
||||||
|
color?: string;
|
||||||
|
segments: ProgressSegment[];
|
||||||
|
};
|
||||||
|
|
||||||
|
const Progress = React.forwardRef<
|
||||||
|
React.ElementRef<typeof ProgressPrimitive.Root>,
|
||||||
|
Props
|
||||||
|
>(({ className, segments, color, ...props }, ref) => {
|
||||||
|
const sortedSegments = segments.sort((a, b) => b.value - a.value);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<ProgressPrimitive.Root
|
||||||
|
ref={ref}
|
||||||
|
className={cn("relative h-2 w-full overflow-hidden rounded", className)}
|
||||||
|
style={{
|
||||||
|
backgroundColor: `${color}33`,
|
||||||
|
}}
|
||||||
|
{...props}
|
||||||
|
>
|
||||||
|
{sortedSegments.map((segment, index) => (
|
||||||
|
<ProgressPrimitive.Indicator
|
||||||
|
key={index}
|
||||||
|
className="bg-primary absolute top-0 h-full transition-all"
|
||||||
|
style={{
|
||||||
|
width: `${segment.value}%`,
|
||||||
|
left: "0%",
|
||||||
|
backgroundColor: `${color}${Math.round(
|
||||||
|
90 + ((100 - 30) * index) / (sortedSegments.length - 1),
|
||||||
|
)
|
||||||
|
.toString(16)
|
||||||
|
.padStart(2, "0")}`,
|
||||||
|
zIndex: sortedSegments.length - index,
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
))}
|
||||||
|
</ProgressPrimitive.Root>
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
Progress.displayName = "Progress";
|
||||||
|
|
||||||
|
export { Progress };
|
||||||
133
apps/webapp/app/components/ui/toast.tsx
Normal file
133
apps/webapp/app/components/ui/toast.tsx
Normal file
@ -0,0 +1,133 @@
|
|||||||
|
import { Cross2Icon } from "@radix-ui/react-icons";
|
||||||
|
import * as ToastPrimitives from "@radix-ui/react-toast";
|
||||||
|
import { cva, type VariantProps } from "class-variance-authority";
|
||||||
|
import React from "react";
|
||||||
|
|
||||||
|
import { cn } from "../../lib/utils";
|
||||||
|
|
||||||
|
const ToastProvider = ToastPrimitives.Provider;
|
||||||
|
|
||||||
|
const ToastViewport = React.forwardRef<
|
||||||
|
React.ElementRef<typeof ToastPrimitives.Viewport>,
|
||||||
|
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Viewport>
|
||||||
|
>(({ className, ...props }, ref) => (
|
||||||
|
<ToastPrimitives.Viewport
|
||||||
|
ref={ref}
|
||||||
|
className={cn(
|
||||||
|
"fixed top-0 z-[100] flex max-h-screen w-full flex-col-reverse p-4 sm:top-auto sm:right-0 sm:bottom-0 sm:flex-col md:max-w-[420px]",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
));
|
||||||
|
ToastViewport.displayName = ToastPrimitives.Viewport.displayName;
|
||||||
|
|
||||||
|
const toastVariants = cva(
|
||||||
|
"group pointer-events-auto relative flex w-full items-center justify-between space-x-2 overflow-hidden rounded-md border p-3 pr-6 shadow-lg transition-all data-[swipe=cancel]:translate-x-0 data-[swipe=end]:translate-x-[var(--radix-toast-swipe-end-x)] data-[swipe=move]:translate-x-[var(--radix-toast-swipe-move-x)] data-[swipe=move]:transition-none data-[state=open]:animate-in data-[state=closed]:animate-out data-[swipe=end]:animate-out data-[state=closed]:fade-out-80 data-[state=closed]:slide-out-to-right-full data-[state=open]:slide-in-from-top-full data-[state=open]:sm:slide-in-from-bottom-full",
|
||||||
|
{
|
||||||
|
variants: {
|
||||||
|
variant: {
|
||||||
|
default: "border bg-background text-foreground",
|
||||||
|
warning: "warning group border-warning bg-warning text-foreground",
|
||||||
|
success: "success group border-success bg-success text-foreground",
|
||||||
|
destructive:
|
||||||
|
"destructive group border-destructive bg-destructive text-foreground",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
defaultVariants: {
|
||||||
|
variant: "default",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
const Toast = React.forwardRef<
|
||||||
|
React.ElementRef<typeof ToastPrimitives.Root>,
|
||||||
|
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Root> &
|
||||||
|
VariantProps<typeof toastVariants>
|
||||||
|
>(({ className, variant, ...props }, ref) => {
|
||||||
|
return (
|
||||||
|
<ToastPrimitives.Root
|
||||||
|
ref={ref}
|
||||||
|
className={cn(
|
||||||
|
toastVariants({ variant }),
|
||||||
|
className,
|
||||||
|
"shadow-1 rounded-md border-0 bg-gray-100 font-sans backdrop-blur-md",
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
});
|
||||||
|
Toast.displayName = ToastPrimitives.Root.displayName;
|
||||||
|
|
||||||
|
const ToastAction = React.forwardRef<
|
||||||
|
React.ElementRef<typeof ToastPrimitives.Action>,
|
||||||
|
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Action>
|
||||||
|
>(({ className, ...props }, ref) => (
|
||||||
|
<ToastPrimitives.Action
|
||||||
|
ref={ref}
|
||||||
|
className={cn(
|
||||||
|
"hover:bg-secondary focus:ring-ring group-[.destructive]:border-muted/40 group-[.destructive]:hover:border-destructive/30 group-[.destructive]:hover:bg-destructive group-[.destructive]:hover:text-destructive-foreground group-[.destructive]:focus:ring-destructive inline-flex h-8 shrink-0 items-center justify-center rounded-md border bg-transparent px-3 text-sm font-medium transition-colors focus:ring-1 focus:outline-none disabled:pointer-events-none disabled:opacity-50",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
));
|
||||||
|
ToastAction.displayName = ToastPrimitives.Action.displayName;
|
||||||
|
|
||||||
|
const ToastClose = React.forwardRef<
|
||||||
|
React.ElementRef<typeof ToastPrimitives.Close>,
|
||||||
|
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Close>
|
||||||
|
>(({ className, ...props }, ref) => (
|
||||||
|
<ToastPrimitives.Close
|
||||||
|
ref={ref}
|
||||||
|
className={cn(
|
||||||
|
"text-foreground/50 hover:text-foreground absolute top-1 right-1 rounded-md p-1 opacity-0 transition-opacity group-hover:opacity-100 group-[.destructive]:text-red-300 group-[.destructive]:hover:text-red-50 focus:opacity-100 focus:ring-1 focus:outline-none group-[.destructive]:focus:ring-red-400 group-[.destructive]:focus:ring-offset-red-600",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
toast-close=""
|
||||||
|
{...props}
|
||||||
|
>
|
||||||
|
<Cross2Icon className="h-4 w-4" />
|
||||||
|
</ToastPrimitives.Close>
|
||||||
|
));
|
||||||
|
ToastClose.displayName = ToastPrimitives.Close.displayName;
|
||||||
|
|
||||||
|
const ToastTitle = React.forwardRef<
|
||||||
|
React.ElementRef<typeof ToastPrimitives.Title>,
|
||||||
|
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Title>
|
||||||
|
>(({ className, ...props }, ref) => (
|
||||||
|
<ToastPrimitives.Title
|
||||||
|
ref={ref}
|
||||||
|
className={cn("font-medium [&+div]:text-xs", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
));
|
||||||
|
ToastTitle.displayName = ToastPrimitives.Title.displayName;
|
||||||
|
|
||||||
|
const ToastDescription = React.forwardRef<
|
||||||
|
React.ElementRef<typeof ToastPrimitives.Description>,
|
||||||
|
React.ComponentPropsWithoutRef<typeof ToastPrimitives.Description>
|
||||||
|
>(({ className, ...props }, ref) => (
|
||||||
|
<ToastPrimitives.Description
|
||||||
|
ref={ref}
|
||||||
|
className={cn("opacity-90", className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
));
|
||||||
|
ToastDescription.displayName = ToastPrimitives.Description.displayName;
|
||||||
|
|
||||||
|
type ToastProps = React.ComponentPropsWithoutRef<typeof Toast>;
|
||||||
|
|
||||||
|
type ToastActionElement = React.ReactElement<typeof ToastAction>;
|
||||||
|
|
||||||
|
export {
|
||||||
|
type ToastProps,
|
||||||
|
type ToastActionElement,
|
||||||
|
ToastProvider,
|
||||||
|
ToastViewport,
|
||||||
|
Toast,
|
||||||
|
ToastTitle,
|
||||||
|
ToastDescription,
|
||||||
|
ToastClose,
|
||||||
|
ToastAction,
|
||||||
|
};
|
||||||
33
apps/webapp/app/components/ui/toaster.tsx
Normal file
33
apps/webapp/app/components/ui/toaster.tsx
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
import {
|
||||||
|
Toast,
|
||||||
|
ToastClose,
|
||||||
|
ToastDescription,
|
||||||
|
ToastProvider,
|
||||||
|
ToastTitle,
|
||||||
|
ToastViewport,
|
||||||
|
} from "~/components/ui/toast";
|
||||||
|
import { useToast } from "~/hooks/use-toast";
|
||||||
|
|
||||||
|
export function Toaster() {
|
||||||
|
const { toasts } = useToast();
|
||||||
|
|
||||||
|
return (
|
||||||
|
<ToastProvider>
|
||||||
|
{toasts.map(function ({ id, title, description, action, ...props }) {
|
||||||
|
return (
|
||||||
|
<Toast key={id} {...props}>
|
||||||
|
<div className="grid gap-1">
|
||||||
|
{title && <ToastTitle>{title}</ToastTitle>}
|
||||||
|
{description && (
|
||||||
|
<ToastDescription>{description}</ToastDescription>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
{action}
|
||||||
|
<ToastClose />
|
||||||
|
</Toast>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
<ToastViewport />
|
||||||
|
</ToastProvider>
|
||||||
|
);
|
||||||
|
}
|
||||||
@ -149,7 +149,7 @@ export const ScrollAreaWithAutoScroll = ({
|
|||||||
className?: string;
|
className?: string;
|
||||||
}) => {
|
}) => {
|
||||||
const { scrollRef } = useAutoScroll({
|
const { scrollRef } = useAutoScroll({
|
||||||
smooth: true,
|
smooth: false,
|
||||||
content: children,
|
content: children,
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -161,7 +161,7 @@ export const ScrollAreaWithAutoScroll = ({
|
|||||||
className,
|
className,
|
||||||
)}
|
)}
|
||||||
>
|
>
|
||||||
<div className="flex h-full w-full max-w-[97ch] flex-col pb-4">
|
<div className="flex h-full w-full max-w-[80ch] flex-col pb-4">
|
||||||
{children}
|
{children}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
120
apps/webapp/app/config/billing.server.ts
Normal file
120
apps/webapp/app/config/billing.server.ts
Normal file
@ -0,0 +1,120 @@
|
|||||||
|
/**
|
||||||
|
* Billing Configuration
|
||||||
|
*
|
||||||
|
* This file centralizes all billing-related configuration.
|
||||||
|
* Billing is feature-flagged and can be disabled for self-hosted instances.
|
||||||
|
*/
|
||||||
|
|
||||||
|
export const BILLING_CONFIG = {
|
||||||
|
// Feature flag: Enable/disable billing system
|
||||||
|
// Self-hosted instances can set this to false for unlimited usage
|
||||||
|
enabled: process.env.ENABLE_BILLING === "true",
|
||||||
|
|
||||||
|
// Stripe configuration (only used if billing is enabled)
|
||||||
|
stripe: {
|
||||||
|
secretKey: process.env.STRIPE_SECRET_KEY,
|
||||||
|
publishableKey: process.env.STRIPE_PUBLISHABLE_KEY,
|
||||||
|
webhookSecret: process.env.STRIPE_WEBHOOK_SECRET,
|
||||||
|
meterEventName: process.env.STRIPE_METER_EVENT_NAME || "echo_credits_used",
|
||||||
|
},
|
||||||
|
|
||||||
|
// Plan configurations
|
||||||
|
plans: {
|
||||||
|
free: {
|
||||||
|
name: "Free",
|
||||||
|
monthlyCredits: parseInt(process.env.FREE_PLAN_CREDITS || "200", 10),
|
||||||
|
enableOverage: false,
|
||||||
|
features: {
|
||||||
|
episodesPerMonth: 200,
|
||||||
|
searchesPerMonth: 200,
|
||||||
|
mcpIntegrations: 3,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
pro: {
|
||||||
|
name: "Pro",
|
||||||
|
monthlyCredits: parseInt(process.env.PRO_PLAN_CREDITS || "2000", 10),
|
||||||
|
enableOverage: true,
|
||||||
|
overagePrice: parseFloat(process.env.PRO_OVERAGE_PRICE || "0.01"), // $0.01 per credit
|
||||||
|
stripePriceId: process.env.PRO_PLAN_STRIPE_PRICE_ID,
|
||||||
|
features: {
|
||||||
|
episodesPerMonth: 2000,
|
||||||
|
searchesPerMonth: 2000,
|
||||||
|
mcpIntegrations: -1, // unlimited
|
||||||
|
prioritySupport: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
max: {
|
||||||
|
name: "Max",
|
||||||
|
monthlyCredits: parseInt(process.env.MAX_PLAN_CREDITS || "10000", 10),
|
||||||
|
enableOverage: true,
|
||||||
|
overagePrice: parseFloat(process.env.MAX_OVERAGE_PRICE || "0.008"), // $0.008 per credit (cheaper than pro)
|
||||||
|
stripePriceId: process.env.MAX_PLAN_STRIPE_PRICE_ID,
|
||||||
|
features: {
|
||||||
|
episodesPerMonth: 10000,
|
||||||
|
searchesPerMonth: 10000,
|
||||||
|
mcpIntegrations: -1, // unlimited
|
||||||
|
prioritySupport: true,
|
||||||
|
customIntegrations: true,
|
||||||
|
dedicatedSupport: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
// Credit costs per operation
|
||||||
|
creditCosts: {
|
||||||
|
addEpisode: parseInt(process.env.CREDIT_COST_EPISODE || "1", 10),
|
||||||
|
search: parseInt(process.env.CREDIT_COST_SEARCH || "1", 10),
|
||||||
|
chatMessage: parseInt(process.env.CREDIT_COST_CHAT || "1", 10),
|
||||||
|
},
|
||||||
|
|
||||||
|
// Billing cycle settings
|
||||||
|
billingCycle: {
|
||||||
|
// When to reset credits (1st of each month by default)
|
||||||
|
resetDay: parseInt(process.env.BILLING_RESET_DAY || "1", 10),
|
||||||
|
},
|
||||||
|
} as const;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get plan configuration by plan type
|
||||||
|
*/
|
||||||
|
export function getPlanConfig(planType: "FREE" | "PRO" | "MAX") {
|
||||||
|
return BILLING_CONFIG.plans[
|
||||||
|
planType.toLowerCase() as keyof typeof BILLING_CONFIG.plans
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if billing is enabled
|
||||||
|
*/
|
||||||
|
export function isBillingEnabled(): boolean {
|
||||||
|
return BILLING_CONFIG.enabled;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if Stripe is configured
|
||||||
|
*/
|
||||||
|
export function isStripeConfigured(): boolean {
|
||||||
|
return !!(
|
||||||
|
BILLING_CONFIG.stripe.secretKey && BILLING_CONFIG.stripe.publishableKey
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate billing configuration
|
||||||
|
*/
|
||||||
|
export function validateBillingConfig() {
|
||||||
|
if (!BILLING_CONFIG.enabled) {
|
||||||
|
console.log(
|
||||||
|
"ℹ️ Billing is disabled. Running in self-hosted mode with unlimited credits.",
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!isStripeConfigured()) {
|
||||||
|
console.warn(
|
||||||
|
"⚠️ ENABLE_BILLING is true but Stripe is not configured. Billing will not work.",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log("✅ Billing is enabled with Stripe integration");
|
||||||
|
}
|
||||||
@ -2,12 +2,9 @@ import { Prisma, PrismaClient } from "@core/database";
|
|||||||
import invariant from "tiny-invariant";
|
import invariant from "tiny-invariant";
|
||||||
import { z } from "zod";
|
import { z } from "zod";
|
||||||
import { env } from "./env.server";
|
import { env } from "./env.server";
|
||||||
import { logger } from "./services/logger.service";
|
|
||||||
import { isValidDatabaseUrl } from "./utils/db";
|
import { isValidDatabaseUrl } from "./utils/db";
|
||||||
import { singleton } from "./utils/singleton";
|
import { singleton } from "./utils/singleton";
|
||||||
|
|
||||||
import { type Span } from "@opentelemetry/api";
|
|
||||||
|
|
||||||
export { Prisma };
|
export { Prisma };
|
||||||
|
|
||||||
export const prisma = singleton("prisma", getClient);
|
export const prisma = singleton("prisma", getClient);
|
||||||
|
|||||||
@ -17,6 +17,7 @@ import { renderToPipeableStream } from "react-dom/server";
|
|||||||
import { initializeStartupServices } from "./utils/startup";
|
import { initializeStartupServices } from "./utils/startup";
|
||||||
import { handleMCPRequest, handleSessionRequest } from "~/services/mcp.server";
|
import { handleMCPRequest, handleSessionRequest } from "~/services/mcp.server";
|
||||||
import { authenticateHybridRequest } from "~/services/routeBuilders/apiBuilder.server";
|
import { authenticateHybridRequest } from "~/services/routeBuilders/apiBuilder.server";
|
||||||
|
import { trackError } from "~/services/telemetry.server";
|
||||||
|
|
||||||
const ABORT_DELAY = 5_000;
|
const ABORT_DELAY = 5_000;
|
||||||
|
|
||||||
@ -27,6 +28,42 @@ async function init() {
|
|||||||
|
|
||||||
init();
|
init();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Global error handler for all server-side errors
|
||||||
|
* This catches errors from loaders, actions, and rendering
|
||||||
|
* Automatically tracks all errors to telemetry
|
||||||
|
*/
|
||||||
|
export function handleError(
|
||||||
|
error: unknown,
|
||||||
|
{ request }: { request: Request },
|
||||||
|
): void {
|
||||||
|
// Don't track 404s or aborted requests as errors
|
||||||
|
if (
|
||||||
|
error instanceof Response &&
|
||||||
|
(error.status === 404 || error.status === 304)
|
||||||
|
) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Track error to telemetry
|
||||||
|
if (error instanceof Error) {
|
||||||
|
const url = new URL(request.url);
|
||||||
|
trackError(error, {
|
||||||
|
url: request.url,
|
||||||
|
path: url.pathname,
|
||||||
|
method: request.method,
|
||||||
|
userAgent: request.headers.get("user-agent") || "unknown",
|
||||||
|
referer: request.headers.get("referer") || undefined,
|
||||||
|
}).catch((trackingError) => {
|
||||||
|
// If telemetry tracking fails, just log it - don't break the app
|
||||||
|
console.error("Failed to track error:", trackingError);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Always log to console for development/debugging
|
||||||
|
console.error(error);
|
||||||
|
}
|
||||||
|
|
||||||
export default function handleRequest(
|
export default function handleRequest(
|
||||||
request: Request,
|
request: Request,
|
||||||
responseStatusCode: number,
|
responseStatusCode: number,
|
||||||
|
|||||||
@ -3,92 +3,146 @@ import { isValidDatabaseUrl } from "./utils/db";
|
|||||||
import { isValidRegex } from "./utils/regex";
|
import { isValidRegex } from "./utils/regex";
|
||||||
import { LLMModelEnum } from "@core/types";
|
import { LLMModelEnum } from "@core/types";
|
||||||
|
|
||||||
const EnvironmentSchema = z.object({
|
const EnvironmentSchema = z
|
||||||
NODE_ENV: z.union([
|
.object({
|
||||||
z.literal("development"),
|
NODE_ENV: z.union([
|
||||||
z.literal("production"),
|
z.literal("development"),
|
||||||
z.literal("test"),
|
z.literal("production"),
|
||||||
]),
|
z.literal("test"),
|
||||||
POSTGRES_DB: z.string(),
|
]),
|
||||||
DATABASE_URL: z
|
POSTGRES_DB: z.string(),
|
||||||
.string()
|
DATABASE_URL: z
|
||||||
.refine(
|
.string()
|
||||||
isValidDatabaseUrl,
|
.refine(
|
||||||
"DATABASE_URL is invalid, for details please check the additional output above this message.",
|
isValidDatabaseUrl,
|
||||||
),
|
"DATABASE_URL is invalid, for details please check the additional output above this message.",
|
||||||
DATABASE_CONNECTION_LIMIT: z.coerce.number().int().default(10),
|
),
|
||||||
DATABASE_POOL_TIMEOUT: z.coerce.number().int().default(60),
|
DATABASE_CONNECTION_LIMIT: z.coerce.number().int().default(10),
|
||||||
DATABASE_CONNECTION_TIMEOUT: z.coerce.number().int().default(20),
|
DATABASE_POOL_TIMEOUT: z.coerce.number().int().default(60),
|
||||||
DIRECT_URL: z
|
DATABASE_CONNECTION_TIMEOUT: z.coerce.number().int().default(20),
|
||||||
.string()
|
DIRECT_URL: z
|
||||||
.refine(
|
.string()
|
||||||
isValidDatabaseUrl,
|
.refine(
|
||||||
"DIRECT_URL is invalid, for details please check the additional output above this message.",
|
isValidDatabaseUrl,
|
||||||
),
|
"DIRECT_URL is invalid, for details please check the additional output above this message.",
|
||||||
DATABASE_READ_REPLICA_URL: z.string().optional(),
|
),
|
||||||
SESSION_SECRET: z.string(),
|
DATABASE_READ_REPLICA_URL: z.string().optional(),
|
||||||
ENCRYPTION_KEY: z.string(),
|
SESSION_SECRET: z.string(),
|
||||||
MAGIC_LINK_SECRET: z.string(),
|
ENCRYPTION_KEY: z.string(),
|
||||||
WHITELISTED_EMAILS: z
|
MAGIC_LINK_SECRET: z.string(),
|
||||||
.string()
|
WHITELISTED_EMAILS: z
|
||||||
.refine(isValidRegex, "WHITELISTED_EMAILS must be a valid regex.")
|
.string()
|
||||||
.optional(),
|
.refine(isValidRegex, "WHITELISTED_EMAILS must be a valid regex.")
|
||||||
ADMIN_EMAILS: z
|
.optional(),
|
||||||
.string()
|
ADMIN_EMAILS: z
|
||||||
.refine(isValidRegex, "ADMIN_EMAILS must be a valid regex.")
|
.string()
|
||||||
.optional(),
|
.refine(isValidRegex, "ADMIN_EMAILS must be a valid regex.")
|
||||||
|
.optional(),
|
||||||
|
|
||||||
APP_ENV: z.string().default(process.env.NODE_ENV),
|
APP_ENV: z.string().default(process.env.NODE_ENV),
|
||||||
LOGIN_ORIGIN: z.string().default("http://localhost:5173"),
|
LOGIN_ORIGIN: z.string().default("http://localhost:5173"),
|
||||||
APP_ORIGIN: z.string().default("http://localhost:5173"),
|
APP_ORIGIN: z.string().default("http://localhost:5173"),
|
||||||
POSTHOG_PROJECT_KEY: z.string().default(""),
|
|
||||||
|
|
||||||
//storage
|
// Telemetry
|
||||||
ACCESS_KEY_ID: z.string().optional(),
|
POSTHOG_PROJECT_KEY: z
|
||||||
SECRET_ACCESS_KEY: z.string().optional(),
|
.string()
|
||||||
BUCKET: z.string().optional(),
|
.default("phc_SwfGIzzX5gh5bazVWoRxZTBhkr7FwvzArS0NRyGXm1a"),
|
||||||
|
TELEMETRY_ENABLED: z
|
||||||
|
.string()
|
||||||
|
.optional()
|
||||||
|
.default("true")
|
||||||
|
.transform((val) => val !== "false" && val !== "0"),
|
||||||
|
TELEMETRY_ANONYMOUS: z
|
||||||
|
.string()
|
||||||
|
.optional()
|
||||||
|
.default("false")
|
||||||
|
.transform((val) => val === "true" || val === "1"),
|
||||||
|
|
||||||
// google auth
|
//storage
|
||||||
AUTH_GOOGLE_CLIENT_ID: z.string().optional(),
|
ACCESS_KEY_ID: z.string().optional(),
|
||||||
AUTH_GOOGLE_CLIENT_SECRET: z.string().optional(),
|
SECRET_ACCESS_KEY: z.string().optional(),
|
||||||
|
BUCKET: z.string().optional(),
|
||||||
|
|
||||||
ENABLE_EMAIL_LOGIN: z.coerce.boolean().default(true),
|
// google auth
|
||||||
|
AUTH_GOOGLE_CLIENT_ID: z.string().optional(),
|
||||||
|
AUTH_GOOGLE_CLIENT_SECRET: z.string().optional(),
|
||||||
|
|
||||||
//Redis
|
ENABLE_EMAIL_LOGIN: z
|
||||||
REDIS_HOST: z.string().default("localhost"),
|
.string()
|
||||||
REDIS_PORT: z.coerce.number().default(6379),
|
.optional()
|
||||||
REDIS_TLS_DISABLED: z.coerce.boolean().default(true),
|
.default("true")
|
||||||
|
.transform((val) => val !== "false" && val !== "0"),
|
||||||
|
|
||||||
//Neo4j
|
//Redis
|
||||||
NEO4J_URI: z.string(),
|
REDIS_HOST: z.string().default("localhost"),
|
||||||
NEO4J_USERNAME: z.string(),
|
REDIS_PORT: z.coerce.number().default(6379),
|
||||||
NEO4J_PASSWORD: z.string(),
|
REDIS_TLS_DISABLED: z
|
||||||
|
.string()
|
||||||
|
.optional()
|
||||||
|
.default("true")
|
||||||
|
.transform((val) => val !== "false" && val !== "0"),
|
||||||
|
|
||||||
//OpenAI
|
//Neo4j
|
||||||
OPENAI_API_KEY: z.string(),
|
NEO4J_URI: z.string(),
|
||||||
|
NEO4J_USERNAME: z.string(),
|
||||||
|
NEO4J_PASSWORD: z.string(),
|
||||||
|
|
||||||
EMAIL_TRANSPORT: z.string().optional(),
|
//OpenAI
|
||||||
FROM_EMAIL: z.string().optional(),
|
OPENAI_API_KEY: z.string().optional(),
|
||||||
REPLY_TO_EMAIL: z.string().optional(),
|
ANTHROPIC_API_KEY: z.string().optional(),
|
||||||
RESEND_API_KEY: z.string().optional(),
|
GOOGLE_GENERATIVE_AI_API_KEY: z.string().optional(),
|
||||||
SMTP_HOST: z.string().optional(),
|
|
||||||
SMTP_PORT: z.coerce.number().optional(),
|
|
||||||
SMTP_SECURE: z.coerce.boolean().optional(),
|
|
||||||
SMTP_USER: z.string().optional(),
|
|
||||||
SMTP_PASSWORD: z.string().optional(),
|
|
||||||
|
|
||||||
//Trigger
|
EMAIL_TRANSPORT: z.string().optional(),
|
||||||
TRIGGER_PROJECT_ID: z.string(),
|
FROM_EMAIL: z.string().optional(),
|
||||||
TRIGGER_SECRET_KEY: z.string(),
|
REPLY_TO_EMAIL: z.string().optional(),
|
||||||
TRIGGER_API_URL: z.string(),
|
RESEND_API_KEY: z.string().optional(),
|
||||||
TRIGGER_DB: z.string().default("trigger"),
|
SMTP_HOST: z.string().optional(),
|
||||||
|
SMTP_PORT: z.coerce.number().optional(),
|
||||||
|
SMTP_SECURE: z
|
||||||
|
.string()
|
||||||
|
.optional()
|
||||||
|
.transform((val) => val === "true" || val === "1"),
|
||||||
|
SMTP_USER: z.string().optional(),
|
||||||
|
SMTP_PASSWORD: z.string().optional(),
|
||||||
|
|
||||||
// Model envs
|
//Trigger
|
||||||
MODEL: z.string().default(LLMModelEnum.GPT41),
|
TRIGGER_PROJECT_ID: z.string().optional(),
|
||||||
EMBEDDING_MODEL: z.string().default("mxbai-embed-large"),
|
TRIGGER_SECRET_KEY: z.string().optional(),
|
||||||
OLLAMA_URL: z.string().optional(),
|
TRIGGER_API_URL: z.string().optional(),
|
||||||
COHERE_API_KEY: z.string().optional(),
|
TRIGGER_DB: z.string().default("trigger"),
|
||||||
});
|
|
||||||
|
// Model envs
|
||||||
|
MODEL: z.string().default(LLMModelEnum.GPT41),
|
||||||
|
EMBEDDING_MODEL: z.string().default("mxbai-embed-large"),
|
||||||
|
EMBEDDING_MODEL_SIZE: z.string().default("1024"),
|
||||||
|
OLLAMA_URL: z.string().optional(),
|
||||||
|
COHERE_API_KEY: z.string().optional(),
|
||||||
|
COHERE_SCORE_THRESHOLD: z.string().default("0.3"),
|
||||||
|
|
||||||
|
AWS_ACCESS_KEY_ID: z.string().optional(),
|
||||||
|
AWS_SECRET_ACCESS_KEY: z.string().optional(),
|
||||||
|
AWS_REGION: z.string().optional(),
|
||||||
|
|
||||||
|
// Queue provider
|
||||||
|
QUEUE_PROVIDER: z.enum(["trigger", "bullmq"]).default("trigger"),
|
||||||
|
})
|
||||||
|
.refine(
|
||||||
|
(data) => {
|
||||||
|
// If QUEUE_PROVIDER is "trigger", then Trigger.dev variables must be present
|
||||||
|
if (data.QUEUE_PROVIDER === "trigger") {
|
||||||
|
return !!(
|
||||||
|
data.TRIGGER_PROJECT_ID &&
|
||||||
|
data.TRIGGER_SECRET_KEY &&
|
||||||
|
data.TRIGGER_API_URL
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
{
|
||||||
|
message:
|
||||||
|
"TRIGGER_PROJECT_ID, TRIGGER_SECRET_KEY, and TRIGGER_API_URL are required when QUEUE_PROVIDER=trigger",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
export type Environment = z.infer<typeof EnvironmentSchema>;
|
export type Environment = z.infer<typeof EnvironmentSchema>;
|
||||||
export const env = EnvironmentSchema.parse(process.env);
|
export const env = EnvironmentSchema.parse(process.env);
|
||||||
|
|||||||
@ -15,6 +15,8 @@ export interface LogItem {
|
|||||||
activityId?: string;
|
activityId?: string;
|
||||||
episodeUUID?: string;
|
episodeUUID?: string;
|
||||||
data?: any;
|
data?: any;
|
||||||
|
spaceIds?: string[];
|
||||||
|
episodeDetails?: any;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface LogsResponse {
|
export interface LogsResponse {
|
||||||
@ -47,7 +49,7 @@ export function useLogs({ endpoint, source, status, type }: UseLogsOptions) {
|
|||||||
(pageNum: number) => {
|
(pageNum: number) => {
|
||||||
const params = new URLSearchParams();
|
const params = new URLSearchParams();
|
||||||
params.set("page", pageNum.toString());
|
params.set("page", pageNum.toString());
|
||||||
params.set("limit", "5");
|
params.set("limit", "50");
|
||||||
if (source) params.set("source", source);
|
if (source) params.set("source", source);
|
||||||
if (status) params.set("status", status);
|
if (status) params.set("status", status);
|
||||||
if (type) params.set("type", type);
|
if (type) params.set("type", type);
|
||||||
|
|||||||
186
apps/webapp/app/hooks/use-toast.ts
Normal file
186
apps/webapp/app/hooks/use-toast.ts
Normal file
@ -0,0 +1,186 @@
|
|||||||
|
import * as React from "react";
|
||||||
|
|
||||||
|
import type { ToastActionElement, ToastProps } from "~/components/ui/toast";
|
||||||
|
|
||||||
|
const TOAST_LIMIT = 1;
|
||||||
|
const TOAST_REMOVE_DELAY = 1000000;
|
||||||
|
|
||||||
|
type ToasterToast = ToastProps & {
|
||||||
|
id: string;
|
||||||
|
title?: React.ReactNode;
|
||||||
|
description?: React.ReactNode;
|
||||||
|
action?: ToastActionElement;
|
||||||
|
};
|
||||||
|
|
||||||
|
const actionTypes = {
|
||||||
|
ADD_TOAST: "ADD_TOAST",
|
||||||
|
UPDATE_TOAST: "UPDATE_TOAST",
|
||||||
|
DISMISS_TOAST: "DISMISS_TOAST",
|
||||||
|
REMOVE_TOAST: "REMOVE_TOAST",
|
||||||
|
} as const;
|
||||||
|
|
||||||
|
let count = 0;
|
||||||
|
|
||||||
|
function genId() {
|
||||||
|
count = (count + 1) % Number.MAX_SAFE_INTEGER;
|
||||||
|
return count.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
type ActionType = typeof actionTypes;
|
||||||
|
|
||||||
|
type Action =
|
||||||
|
| {
|
||||||
|
type: ActionType["ADD_TOAST"];
|
||||||
|
toast: ToasterToast;
|
||||||
|
}
|
||||||
|
| {
|
||||||
|
type: ActionType["UPDATE_TOAST"];
|
||||||
|
toast: Partial<ToasterToast>;
|
||||||
|
}
|
||||||
|
| {
|
||||||
|
type: ActionType["DISMISS_TOAST"];
|
||||||
|
toastId?: ToasterToast["id"];
|
||||||
|
}
|
||||||
|
| {
|
||||||
|
type: ActionType["REMOVE_TOAST"];
|
||||||
|
toastId?: ToasterToast["id"];
|
||||||
|
};
|
||||||
|
|
||||||
|
interface State {
|
||||||
|
toasts: ToasterToast[];
|
||||||
|
}
|
||||||
|
|
||||||
|
const toastTimeouts = new Map<string, ReturnType<typeof setTimeout>>();
|
||||||
|
|
||||||
|
const addToRemoveQueue = (toastId: string) => {
|
||||||
|
if (toastTimeouts.has(toastId)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const timeout = setTimeout(() => {
|
||||||
|
toastTimeouts.delete(toastId);
|
||||||
|
dispatch({
|
||||||
|
type: "REMOVE_TOAST",
|
||||||
|
toastId: toastId,
|
||||||
|
});
|
||||||
|
}, TOAST_REMOVE_DELAY);
|
||||||
|
|
||||||
|
toastTimeouts.set(toastId, timeout);
|
||||||
|
};
|
||||||
|
|
||||||
|
export const reducer = (state: State, action: Action): State => {
|
||||||
|
switch (action.type) {
|
||||||
|
case "ADD_TOAST":
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
toasts: [action.toast, ...state.toasts].slice(0, TOAST_LIMIT),
|
||||||
|
};
|
||||||
|
|
||||||
|
case "UPDATE_TOAST":
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
toasts: state.toasts.map((t) =>
|
||||||
|
t.id === action.toast.id ? { ...t, ...action.toast } : t,
|
||||||
|
),
|
||||||
|
};
|
||||||
|
|
||||||
|
case "DISMISS_TOAST": {
|
||||||
|
const { toastId } = action;
|
||||||
|
|
||||||
|
if (toastId) {
|
||||||
|
addToRemoveQueue(toastId);
|
||||||
|
} else {
|
||||||
|
state.toasts.forEach((toast) => {
|
||||||
|
addToRemoveQueue(toast.id);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
toasts: state.toasts.map((t) =>
|
||||||
|
t.id === toastId || toastId === undefined
|
||||||
|
? {
|
||||||
|
...t,
|
||||||
|
open: false,
|
||||||
|
}
|
||||||
|
: t,
|
||||||
|
),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
case "REMOVE_TOAST":
|
||||||
|
if (action.toastId === undefined) {
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
toasts: [],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
toasts: state.toasts.filter((t) => t.id !== action.toastId),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const listeners: Array<(state: State) => void> = [];
|
||||||
|
|
||||||
|
let memoryState: State = { toasts: [] };
|
||||||
|
|
||||||
|
function dispatch(action: Action) {
|
||||||
|
memoryState = reducer(memoryState, action);
|
||||||
|
listeners.forEach((listener) => {
|
||||||
|
listener(memoryState);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
type Toast = Omit<ToasterToast, "id">;
|
||||||
|
|
||||||
|
function toast({ ...props }: Toast) {
|
||||||
|
const id = genId();
|
||||||
|
|
||||||
|
const update = (props: ToasterToast) =>
|
||||||
|
dispatch({
|
||||||
|
type: "UPDATE_TOAST",
|
||||||
|
toast: { ...props, id },
|
||||||
|
});
|
||||||
|
const dismiss = () => dispatch({ type: "DISMISS_TOAST", toastId: id });
|
||||||
|
|
||||||
|
dispatch({
|
||||||
|
type: "ADD_TOAST",
|
||||||
|
toast: {
|
||||||
|
...props,
|
||||||
|
id,
|
||||||
|
open: true,
|
||||||
|
onOpenChange: (open) => {
|
||||||
|
if (!open) dismiss();
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: id,
|
||||||
|
dismiss,
|
||||||
|
update,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function useToast() {
|
||||||
|
const [state, setState] = React.useState<State>(memoryState);
|
||||||
|
|
||||||
|
React.useEffect(() => {
|
||||||
|
listeners.push(setState);
|
||||||
|
return () => {
|
||||||
|
const index = listeners.indexOf(setState);
|
||||||
|
if (index > -1) {
|
||||||
|
listeners.splice(index, 1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}, [state]);
|
||||||
|
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
toast,
|
||||||
|
dismiss: (toastId?: string) => dispatch({ type: "DISMISS_TOAST", toastId }),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export { useToast, toast };
|
||||||
@ -6,6 +6,7 @@ import { useOptionalUser, useUserChanged } from "./useUser";
|
|||||||
|
|
||||||
export const usePostHog = (
|
export const usePostHog = (
|
||||||
apiKey?: string,
|
apiKey?: string,
|
||||||
|
telemetryEnabled = true,
|
||||||
logging = false,
|
logging = false,
|
||||||
debug = false,
|
debug = false,
|
||||||
): void => {
|
): void => {
|
||||||
@ -15,6 +16,8 @@ export const usePostHog = (
|
|||||||
|
|
||||||
//start PostHog once
|
//start PostHog once
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
|
// Respect telemetry settings
|
||||||
|
if (!telemetryEnabled) return;
|
||||||
if (apiKey === undefined || apiKey === "") return;
|
if (apiKey === undefined || apiKey === "") return;
|
||||||
if (postHogInitialized.current === true) return;
|
if (postHogInitialized.current === true) return;
|
||||||
if (logging) console.log("Initializing PostHog");
|
if (logging) console.log("Initializing PostHog");
|
||||||
@ -27,19 +30,26 @@ export const usePostHog = (
|
|||||||
if (logging) console.log("PostHog loaded");
|
if (logging) console.log("PostHog loaded");
|
||||||
if (user !== undefined) {
|
if (user !== undefined) {
|
||||||
if (logging) console.log("Loaded: Identifying user", user);
|
if (logging) console.log("Loaded: Identifying user", user);
|
||||||
posthog.identify(user.id, { email: user.email });
|
posthog.identify(user.id, {
|
||||||
|
email: user.email,
|
||||||
|
name: user.name,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
postHogInitialized.current = true;
|
postHogInitialized.current = true;
|
||||||
}, [apiKey, logging, user]);
|
}, [apiKey, telemetryEnabled, logging, user]);
|
||||||
|
|
||||||
useUserChanged((user) => {
|
useUserChanged((user) => {
|
||||||
if (postHogInitialized.current === false) return;
|
if (postHogInitialized.current === false) return;
|
||||||
|
if (!telemetryEnabled) return;
|
||||||
if (logging) console.log("User changed");
|
if (logging) console.log("User changed");
|
||||||
if (user) {
|
if (user) {
|
||||||
if (logging) console.log("Identifying user", user);
|
if (logging) console.log("Identifying user", user);
|
||||||
posthog.identify(user.id, { email: user.email });
|
posthog.identify(user.id, {
|
||||||
|
email: user.email,
|
||||||
|
name: user.name,
|
||||||
|
});
|
||||||
} else {
|
} else {
|
||||||
if (logging) console.log("Resetting user");
|
if (logging) console.log("Resetting user");
|
||||||
posthog.reset();
|
posthog.reset();
|
||||||
|
|||||||
@ -5,7 +5,8 @@ import { useChanged } from "./useChanged";
|
|||||||
import { useTypedMatchesData } from "./useTypedMatchData";
|
import { useTypedMatchesData } from "./useTypedMatchData";
|
||||||
|
|
||||||
export interface ExtendedUser extends User {
|
export interface ExtendedUser extends User {
|
||||||
availableCredits?: number;
|
availableCredits: number;
|
||||||
|
totalCredits: number;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function useIsImpersonating(matches?: UIMatch[]) {
|
export function useIsImpersonating(matches?: UIMatch[]) {
|
||||||
@ -23,7 +24,11 @@ export function useOptionalUser(matches?: UIMatch[]): ExtendedUser | undefined {
|
|||||||
});
|
});
|
||||||
|
|
||||||
return routeMatch?.user
|
return routeMatch?.user
|
||||||
? { ...routeMatch?.user, availableCredits: routeMatch?.availableCredits }
|
? {
|
||||||
|
...routeMatch?.user,
|
||||||
|
availableCredits: routeMatch?.availableCredits,
|
||||||
|
totalCredits: routeMatch?.totalCredits,
|
||||||
|
}
|
||||||
: undefined;
|
: undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
250
apps/webapp/app/jobs/bert/topic-analysis.logic.ts
Normal file
250
apps/webapp/app/jobs/bert/topic-analysis.logic.ts
Normal file
@ -0,0 +1,250 @@
|
|||||||
|
import { exec } from "child_process";
|
||||||
|
import { promisify } from "util";
|
||||||
|
import { identifySpacesForTopics } from "~/jobs/spaces/space-identification.logic";
|
||||||
|
import { assignEpisodesToSpace } from "~/services/graphModels/space";
|
||||||
|
import { logger } from "~/services/logger.service";
|
||||||
|
import { SpaceService } from "~/services/space.server";
|
||||||
|
import { prisma } from "~/trigger/utils/prisma";
|
||||||
|
|
||||||
|
const execAsync = promisify(exec);
|
||||||
|
|
||||||
|
export interface TopicAnalysisPayload {
|
||||||
|
userId: string;
|
||||||
|
workspaceId: string;
|
||||||
|
minTopicSize?: number;
|
||||||
|
nrTopics?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TopicAnalysisResult {
|
||||||
|
topics: {
|
||||||
|
[topicId: string]: {
|
||||||
|
keywords: string[];
|
||||||
|
episodeIds: string[];
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Run BERT analysis using exec (for BullMQ/Docker)
|
||||||
|
*/
|
||||||
|
async function runBertWithExec(
|
||||||
|
userId: string,
|
||||||
|
minTopicSize: number,
|
||||||
|
nrTopics?: number,
|
||||||
|
): Promise<string> {
|
||||||
|
let command = `python3 /core/apps/webapp/python/main.py ${userId} --json`;
|
||||||
|
|
||||||
|
if (minTopicSize) {
|
||||||
|
command += ` --min-topic-size ${minTopicSize}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (nrTopics) {
|
||||||
|
command += ` --nr-topics ${nrTopics}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`[BERT Topic Analysis] Executing: ${command}`);
|
||||||
|
|
||||||
|
const { stdout, stderr } = await execAsync(command, {
|
||||||
|
timeout: 300000, // 5 minutes
|
||||||
|
maxBuffer: 10 * 1024 * 1024, // 10MB buffer for large outputs
|
||||||
|
});
|
||||||
|
|
||||||
|
if (stderr) {
|
||||||
|
console.warn(`[BERT Topic Analysis] Warnings:`, stderr);
|
||||||
|
}
|
||||||
|
|
||||||
|
return stdout;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process BERT topic analysis on user's episodes
|
||||||
|
* This is the common logic shared between Trigger.dev and BullMQ
|
||||||
|
*
|
||||||
|
* NOTE: This function does NOT update workspace.metadata.lastTopicAnalysisAt
|
||||||
|
* That should be done by the caller BEFORE enqueueing this job to prevent
|
||||||
|
* duplicate analyses from racing conditions.
|
||||||
|
*/
|
||||||
|
export async function processTopicAnalysis(
|
||||||
|
payload: TopicAnalysisPayload,
|
||||||
|
enqueueSpaceSummary?: (params: {
|
||||||
|
spaceId: string;
|
||||||
|
userId: string;
|
||||||
|
}) => Promise<any>,
|
||||||
|
pythonRunner?: (
|
||||||
|
userId: string,
|
||||||
|
minTopicSize: number,
|
||||||
|
nrTopics?: number,
|
||||||
|
) => Promise<string>,
|
||||||
|
): Promise<TopicAnalysisResult> {
|
||||||
|
const { userId, workspaceId, minTopicSize = 10, nrTopics } = payload;
|
||||||
|
|
||||||
|
console.log(`[BERT Topic Analysis] Starting analysis for user: ${userId}`);
|
||||||
|
console.log(
|
||||||
|
`[BERT Topic Analysis] Parameters: minTopicSize=${minTopicSize}, nrTopics=${nrTopics || "auto"}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const startTime = Date.now();
|
||||||
|
|
||||||
|
// Run BERT analysis using provided runner or default exec
|
||||||
|
const runner = pythonRunner || runBertWithExec;
|
||||||
|
const stdout = await runner(userId, minTopicSize, nrTopics);
|
||||||
|
|
||||||
|
const duration = Date.now() - startTime;
|
||||||
|
console.log(`[BERT Topic Analysis] Completed in ${duration}ms`);
|
||||||
|
|
||||||
|
// Parse the JSON output
|
||||||
|
const result: TopicAnalysisResult = JSON.parse(stdout);
|
||||||
|
|
||||||
|
// Log summary
|
||||||
|
const topicCount = Object.keys(result.topics).length;
|
||||||
|
const totalEpisodes = Object.values(result.topics).reduce(
|
||||||
|
(sum, topic) => sum + topic.episodeIds.length,
|
||||||
|
0,
|
||||||
|
);
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`[BERT Topic Analysis] Found ${topicCount} topics covering ${totalEpisodes} episodes`,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Step 2: Identify spaces for topics using LLM
|
||||||
|
try {
|
||||||
|
logger.info("[BERT Topic Analysis] Starting space identification", {
|
||||||
|
userId,
|
||||||
|
topicCount,
|
||||||
|
});
|
||||||
|
|
||||||
|
const spaceProposals = await identifySpacesForTopics({
|
||||||
|
userId,
|
||||||
|
topics: result.topics,
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info("[BERT Topic Analysis] Space identification completed", {
|
||||||
|
userId,
|
||||||
|
proposalCount: spaceProposals.length,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Step 3: Create or find spaces and assign episodes
|
||||||
|
// Get existing spaces from PostgreSQL
|
||||||
|
const existingSpacesFromDb = await prisma.space.findMany({
|
||||||
|
where: { workspaceId },
|
||||||
|
});
|
||||||
|
const existingSpacesByName = new Map(
|
||||||
|
existingSpacesFromDb.map((s) => [s.name.toLowerCase(), s]),
|
||||||
|
);
|
||||||
|
|
||||||
|
for (const proposal of spaceProposals) {
|
||||||
|
try {
|
||||||
|
// Check if space already exists (case-insensitive match)
|
||||||
|
let spaceId: string;
|
||||||
|
const existingSpace = existingSpacesByName.get(
|
||||||
|
proposal.name.toLowerCase(),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (existingSpace) {
|
||||||
|
// Use existing space
|
||||||
|
spaceId = existingSpace.id;
|
||||||
|
logger.info("[BERT Topic Analysis] Using existing space", {
|
||||||
|
spaceName: proposal.name,
|
||||||
|
spaceId,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// Create new space (creates in both PostgreSQL and Neo4j)
|
||||||
|
// Skip automatic space assignment since we're manually assigning from BERT topics
|
||||||
|
const spaceService = new SpaceService();
|
||||||
|
const newSpace = await spaceService.createSpace({
|
||||||
|
name: proposal.name,
|
||||||
|
description: proposal.intent,
|
||||||
|
userId,
|
||||||
|
workspaceId,
|
||||||
|
});
|
||||||
|
spaceId = newSpace.id;
|
||||||
|
logger.info("[BERT Topic Analysis] Created new space", {
|
||||||
|
spaceName: proposal.name,
|
||||||
|
spaceId,
|
||||||
|
intent: proposal.intent,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Collect all episode IDs from the topics in this proposal
|
||||||
|
const episodeIds: string[] = [];
|
||||||
|
for (const topicId of proposal.topics) {
|
||||||
|
const topic = result.topics[topicId];
|
||||||
|
if (topic) {
|
||||||
|
episodeIds.push(...topic.episodeIds);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Assign all episodes from these topics to the space
|
||||||
|
if (episodeIds.length > 0) {
|
||||||
|
await assignEpisodesToSpace(episodeIds, spaceId, userId);
|
||||||
|
logger.info("[BERT Topic Analysis] Assigned episodes to space", {
|
||||||
|
spaceName: proposal.name,
|
||||||
|
spaceId,
|
||||||
|
episodeCount: episodeIds.length,
|
||||||
|
topics: proposal.topics,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Step 4: Trigger space summary if callback provided
|
||||||
|
if (enqueueSpaceSummary) {
|
||||||
|
await enqueueSpaceSummary({ spaceId, userId });
|
||||||
|
logger.info("[BERT Topic Analysis] Triggered space summary", {
|
||||||
|
spaceName: proposal.name,
|
||||||
|
spaceId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (spaceError) {
|
||||||
|
logger.error(
|
||||||
|
"[BERT Topic Analysis] Failed to process space proposal",
|
||||||
|
{
|
||||||
|
proposal,
|
||||||
|
error: spaceError,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
// Continue with other proposals
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (spaceIdentificationError) {
|
||||||
|
logger.error(
|
||||||
|
"[BERT Topic Analysis] Space identification failed, returning topics only",
|
||||||
|
{
|
||||||
|
error: spaceIdentificationError,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
// Return topics even if space identification fails
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`[BERT Topic Analysis] Error:`, error);
|
||||||
|
|
||||||
|
if (error instanceof Error) {
|
||||||
|
// Check for timeout
|
||||||
|
if (error.message.includes("ETIMEDOUT")) {
|
||||||
|
throw new Error(
|
||||||
|
`Topic analysis timed out after 5 minutes. User may have too many episodes.`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for Python errors
|
||||||
|
if (error.message.includes("python3: not found")) {
|
||||||
|
throw new Error(`Python 3 is not installed or not available in PATH.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for Neo4j connection errors
|
||||||
|
if (error.message.includes("Failed to connect to Neo4j")) {
|
||||||
|
throw new Error(
|
||||||
|
`Could not connect to Neo4j. Check NEO4J_URI and credentials.`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for no episodes
|
||||||
|
if (error.message.includes("No episodes found")) {
|
||||||
|
throw new Error(`No episodes found for userId: ${userId}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
82
apps/webapp/app/jobs/conversation/create-title.logic.ts
Normal file
82
apps/webapp/app/jobs/conversation/create-title.logic.ts
Normal file
@ -0,0 +1,82 @@
|
|||||||
|
import { conversationTitlePrompt } from "~/trigger/conversation/prompt";
|
||||||
|
import { prisma } from "~/trigger/utils/prisma";
|
||||||
|
import { logger } from "~/services/logger.service";
|
||||||
|
import { generateText, type LanguageModel } from "ai";
|
||||||
|
import { getModel } from "~/lib/model.server";
|
||||||
|
|
||||||
|
export interface CreateConversationTitlePayload {
|
||||||
|
conversationId: string;
|
||||||
|
message: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CreateConversationTitleResult {
|
||||||
|
success: boolean;
|
||||||
|
title?: string;
|
||||||
|
error?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Core business logic for creating conversation titles
|
||||||
|
* This is shared between Trigger.dev and BullMQ implementations
|
||||||
|
*/
|
||||||
|
export async function processConversationTitleCreation(
|
||||||
|
payload: CreateConversationTitlePayload,
|
||||||
|
): Promise<CreateConversationTitleResult> {
|
||||||
|
try {
|
||||||
|
let conversationTitleResponse = "";
|
||||||
|
const { text } = await generateText({
|
||||||
|
model: getModel() as LanguageModel,
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: conversationTitlePrompt.replace(
|
||||||
|
"{{message}}",
|
||||||
|
payload.message,
|
||||||
|
),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
const outputMatch = text.match(/<output>(.*?)<\/output>/s);
|
||||||
|
|
||||||
|
logger.info(`Conversation title data: ${JSON.stringify(outputMatch)}`);
|
||||||
|
|
||||||
|
if (!outputMatch) {
|
||||||
|
logger.error("No output found in recurrence response");
|
||||||
|
throw new Error("Invalid response format from AI");
|
||||||
|
}
|
||||||
|
|
||||||
|
const jsonStr = outputMatch[1].trim();
|
||||||
|
const conversationTitleData = JSON.parse(jsonStr);
|
||||||
|
|
||||||
|
if (conversationTitleData) {
|
||||||
|
await prisma.conversation.update({
|
||||||
|
where: {
|
||||||
|
id: payload.conversationId,
|
||||||
|
},
|
||||||
|
data: {
|
||||||
|
title: conversationTitleData.title,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
title: conversationTitleData.title,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: "No title generated",
|
||||||
|
};
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error(
|
||||||
|
`Error creating conversation title for ${payload.conversationId}:`,
|
||||||
|
error,
|
||||||
|
);
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: error.message,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
290
apps/webapp/app/jobs/ingest/ingest-document.logic.ts
Normal file
290
apps/webapp/app/jobs/ingest/ingest-document.logic.ts
Normal file
@ -0,0 +1,290 @@
|
|||||||
|
import { type z } from "zod";
|
||||||
|
|
||||||
|
import { IngestionStatus } from "@core/database";
|
||||||
|
import { EpisodeTypeEnum } from "@core/types";
|
||||||
|
import { logger } from "~/services/logger.service";
|
||||||
|
import { saveDocument } from "~/services/graphModels/document";
|
||||||
|
|
||||||
|
import { DocumentVersioningService } from "~/services/documentVersioning.server";
|
||||||
|
import { DocumentDifferentialService } from "~/services/documentDiffer.server";
|
||||||
|
import { KnowledgeGraphService } from "~/services/knowledgeGraph.server";
|
||||||
|
import { prisma } from "~/trigger/utils/prisma";
|
||||||
|
import { type IngestBodyRequest } from "./ingest-episode.logic";
|
||||||
|
|
||||||
|
export interface IngestDocumentPayload {
|
||||||
|
body: z.infer<typeof IngestBodyRequest>;
|
||||||
|
userId: string;
|
||||||
|
workspaceId: string;
|
||||||
|
queueId: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IngestDocumentResult {
|
||||||
|
success: boolean;
|
||||||
|
error?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Core business logic for document ingestion with differential processing
|
||||||
|
* This is shared between Trigger.dev and BullMQ implementations
|
||||||
|
*
|
||||||
|
* Note: This function should NOT call trigger functions directly for chunk processing.
|
||||||
|
* Instead, use the enqueueEpisodeIngestion callback to queue episode ingestion jobs.
|
||||||
|
*/
|
||||||
|
export async function processDocumentIngestion(
|
||||||
|
payload: IngestDocumentPayload,
|
||||||
|
// Callback function for enqueueing episode ingestion for each chunk
|
||||||
|
enqueueEpisodeIngestion?: (params: {
|
||||||
|
body: any;
|
||||||
|
userId: string;
|
||||||
|
workspaceId: string;
|
||||||
|
queueId: string;
|
||||||
|
}) => Promise<{ id?: string }>,
|
||||||
|
): Promise<IngestDocumentResult> {
|
||||||
|
const startTime = Date.now();
|
||||||
|
|
||||||
|
try {
|
||||||
|
logger.log(`Processing document for user ${payload.userId}`, {
|
||||||
|
contentLength: payload.body.episodeBody.length,
|
||||||
|
});
|
||||||
|
|
||||||
|
await prisma.ingestionQueue.update({
|
||||||
|
where: { id: payload.queueId },
|
||||||
|
data: {
|
||||||
|
status: IngestionStatus.PROCESSING,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const documentBody = payload.body;
|
||||||
|
|
||||||
|
// Step 1: Initialize services and prepare document version
|
||||||
|
const versioningService = new DocumentVersioningService();
|
||||||
|
const differentialService = new DocumentDifferentialService();
|
||||||
|
const knowledgeGraphService = new KnowledgeGraphService();
|
||||||
|
|
||||||
|
const {
|
||||||
|
documentNode: document,
|
||||||
|
versionInfo,
|
||||||
|
chunkedDocument,
|
||||||
|
} = await versioningService.prepareDocumentVersion(
|
||||||
|
documentBody.sessionId!,
|
||||||
|
payload.userId,
|
||||||
|
documentBody.metadata?.documentTitle?.toString() || "Untitled Document",
|
||||||
|
documentBody.episodeBody,
|
||||||
|
documentBody.source,
|
||||||
|
documentBody.metadata || {},
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.log(`Document version analysis:`, {
|
||||||
|
version: versionInfo.newVersion,
|
||||||
|
isNewDocument: versionInfo.isNewDocument,
|
||||||
|
hasContentChanged: versionInfo.hasContentChanged,
|
||||||
|
changePercentage: versionInfo.chunkLevelChanges.changePercentage,
|
||||||
|
changedChunks: versionInfo.chunkLevelChanges.changedChunkIndices.length,
|
||||||
|
totalChunks: versionInfo.chunkLevelChanges.totalChunks,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Step 2: Determine processing strategy
|
||||||
|
const differentialDecision =
|
||||||
|
await differentialService.analyzeDifferentialNeed(
|
||||||
|
documentBody.episodeBody,
|
||||||
|
versionInfo.existingDocument,
|
||||||
|
chunkedDocument,
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.log(`Differential analysis:`, {
|
||||||
|
shouldUseDifferential: differentialDecision.shouldUseDifferential,
|
||||||
|
strategy: differentialDecision.strategy,
|
||||||
|
reason: differentialDecision.reason,
|
||||||
|
documentSizeTokens: differentialDecision.documentSizeTokens,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Early return for unchanged documents
|
||||||
|
if (differentialDecision.strategy === "skip_processing") {
|
||||||
|
logger.log("Document content unchanged, skipping processing");
|
||||||
|
await prisma.ingestionQueue.update({
|
||||||
|
where: { id: payload.queueId },
|
||||||
|
data: {
|
||||||
|
status: IngestionStatus.COMPLETED,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 3: Save the new document version
|
||||||
|
await saveDocument(document);
|
||||||
|
|
||||||
|
// Step 3.1: Invalidate statements from previous document version if it exists
|
||||||
|
let invalidationResults = null;
|
||||||
|
if (versionInfo.existingDocument && versionInfo.hasContentChanged) {
|
||||||
|
logger.log(
|
||||||
|
`Invalidating statements from previous document version: ${versionInfo.existingDocument.uuid}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
invalidationResults =
|
||||||
|
await knowledgeGraphService.invalidateStatementsFromPreviousDocumentVersion(
|
||||||
|
{
|
||||||
|
previousDocumentUuid: versionInfo.existingDocument.uuid,
|
||||||
|
newDocumentContent: documentBody.episodeBody,
|
||||||
|
userId: payload.userId,
|
||||||
|
invalidatedBy: document.uuid,
|
||||||
|
semanticSimilarityThreshold: 0.75, // Configurable threshold
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.log(`Statement invalidation completed:`, {
|
||||||
|
totalAnalyzed: invalidationResults.totalStatementsAnalyzed,
|
||||||
|
invalidated: invalidationResults.invalidatedStatements.length,
|
||||||
|
preserved: invalidationResults.preservedStatements.length,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.log(`Document chunked into ${chunkedDocument.chunks.length} chunks`);
|
||||||
|
|
||||||
|
// Step 4: Process chunks based on differential strategy
|
||||||
|
let chunksToProcess = chunkedDocument.chunks;
|
||||||
|
let processingMode = "full";
|
||||||
|
|
||||||
|
if (
|
||||||
|
differentialDecision.shouldUseDifferential &&
|
||||||
|
differentialDecision.strategy === "chunk_level_diff"
|
||||||
|
) {
|
||||||
|
// Only process changed chunks
|
||||||
|
const chunkComparisons = differentialService.getChunkComparisons(
|
||||||
|
versionInfo.existingDocument!,
|
||||||
|
chunkedDocument,
|
||||||
|
);
|
||||||
|
|
||||||
|
const changedIndices =
|
||||||
|
differentialService.getChunksNeedingReprocessing(chunkComparisons);
|
||||||
|
chunksToProcess = chunkedDocument.chunks.filter((chunk) =>
|
||||||
|
changedIndices.includes(chunk.chunkIndex),
|
||||||
|
);
|
||||||
|
processingMode = "differential";
|
||||||
|
|
||||||
|
logger.log(
|
||||||
|
`Differential processing: ${chunksToProcess.length}/${chunkedDocument.chunks.length} chunks need reprocessing`,
|
||||||
|
);
|
||||||
|
} else if (differentialDecision.strategy === "full_reingest") {
|
||||||
|
// Process all chunks
|
||||||
|
processingMode = "full";
|
||||||
|
logger.log(
|
||||||
|
`Full reingestion: processing all ${chunkedDocument.chunks.length} chunks`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 5: Queue chunks for processing
|
||||||
|
const episodeHandlers = [];
|
||||||
|
if (enqueueEpisodeIngestion) {
|
||||||
|
for (const chunk of chunksToProcess) {
|
||||||
|
const chunkEpisodeData = {
|
||||||
|
episodeBody: chunk.content,
|
||||||
|
referenceTime: documentBody.referenceTime,
|
||||||
|
metadata: {
|
||||||
|
...documentBody.metadata,
|
||||||
|
processingMode,
|
||||||
|
differentialStrategy: differentialDecision.strategy,
|
||||||
|
chunkHash: chunk.contentHash,
|
||||||
|
documentTitle:
|
||||||
|
documentBody.metadata?.documentTitle?.toString() ||
|
||||||
|
"Untitled Document",
|
||||||
|
chunkIndex: chunk.chunkIndex,
|
||||||
|
documentUuid: document.uuid,
|
||||||
|
},
|
||||||
|
source: documentBody.source,
|
||||||
|
spaceIds: documentBody.spaceIds,
|
||||||
|
sessionId: documentBody.sessionId,
|
||||||
|
type: EpisodeTypeEnum.DOCUMENT,
|
||||||
|
};
|
||||||
|
|
||||||
|
const episodeHandler = await enqueueEpisodeIngestion({
|
||||||
|
body: chunkEpisodeData,
|
||||||
|
userId: payload.userId,
|
||||||
|
workspaceId: payload.workspaceId,
|
||||||
|
queueId: payload.queueId,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (episodeHandler.id) {
|
||||||
|
episodeHandlers.push(episodeHandler.id);
|
||||||
|
logger.log(
|
||||||
|
`Queued chunk ${chunk.chunkIndex + 1} for ${processingMode} processing`,
|
||||||
|
{
|
||||||
|
handlerId: episodeHandler.id,
|
||||||
|
chunkSize: chunk.content.length,
|
||||||
|
chunkHash: chunk.contentHash,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate cost savings
|
||||||
|
const costSavings = differentialService.calculateCostSavings(
|
||||||
|
chunkedDocument.chunks.length,
|
||||||
|
chunksToProcess.length,
|
||||||
|
);
|
||||||
|
|
||||||
|
await prisma.ingestionQueue.update({
|
||||||
|
where: { id: payload.queueId },
|
||||||
|
data: {
|
||||||
|
output: {
|
||||||
|
documentUuid: document.uuid,
|
||||||
|
version: versionInfo.newVersion,
|
||||||
|
totalChunks: chunkedDocument.chunks.length,
|
||||||
|
chunksProcessed: chunksToProcess.length,
|
||||||
|
chunksSkipped: costSavings.chunksSkipped,
|
||||||
|
processingMode,
|
||||||
|
differentialStrategy: differentialDecision.strategy,
|
||||||
|
estimatedSavings: `${costSavings.estimatedSavingsPercentage.toFixed(1)}%`,
|
||||||
|
statementInvalidation: invalidationResults
|
||||||
|
? {
|
||||||
|
totalAnalyzed: invalidationResults.totalStatementsAnalyzed,
|
||||||
|
invalidated: invalidationResults.invalidatedStatements.length,
|
||||||
|
preserved: invalidationResults.preservedStatements.length,
|
||||||
|
}
|
||||||
|
: null,
|
||||||
|
episodes: [],
|
||||||
|
episodeHandlers,
|
||||||
|
},
|
||||||
|
status: IngestionStatus.PROCESSING,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const processingTimeMs = Date.now() - startTime;
|
||||||
|
|
||||||
|
logger.log(
|
||||||
|
`Document differential processing completed in ${processingTimeMs}ms`,
|
||||||
|
{
|
||||||
|
documentUuid: document.uuid,
|
||||||
|
version: versionInfo.newVersion,
|
||||||
|
processingMode,
|
||||||
|
totalChunks: chunkedDocument.chunks.length,
|
||||||
|
chunksProcessed: chunksToProcess.length,
|
||||||
|
chunksSkipped: costSavings.chunksSkipped,
|
||||||
|
estimatedSavings: `${costSavings.estimatedSavingsPercentage.toFixed(1)}%`,
|
||||||
|
changePercentage: `${differentialDecision.changePercentage.toFixed(1)}%`,
|
||||||
|
statementInvalidation: invalidationResults
|
||||||
|
? {
|
||||||
|
totalAnalyzed: invalidationResults.totalStatementsAnalyzed,
|
||||||
|
invalidated: invalidationResults.invalidatedStatements.length,
|
||||||
|
preserved: invalidationResults.preservedStatements.length,
|
||||||
|
}
|
||||||
|
: "No previous version",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
return { success: true };
|
||||||
|
} catch (err: any) {
|
||||||
|
await prisma.ingestionQueue.update({
|
||||||
|
where: { id: payload.queueId },
|
||||||
|
data: {
|
||||||
|
error: err.message,
|
||||||
|
status: IngestionStatus.FAILED,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.error(`Error processing document for user ${payload.userId}:`, err);
|
||||||
|
return { success: false, error: err.message };
|
||||||
|
}
|
||||||
|
}
|
||||||
314
apps/webapp/app/jobs/ingest/ingest-episode.logic.ts
Normal file
314
apps/webapp/app/jobs/ingest/ingest-episode.logic.ts
Normal file
@ -0,0 +1,314 @@
|
|||||||
|
import { z } from "zod";
|
||||||
|
import { KnowledgeGraphService } from "~/services/knowledgeGraph.server";
|
||||||
|
import { linkEpisodeToDocument } from "~/services/graphModels/document";
|
||||||
|
import { IngestionStatus } from "@core/database";
|
||||||
|
import { logger } from "~/services/logger.service";
|
||||||
|
import { prisma } from "~/trigger/utils/prisma";
|
||||||
|
import { EpisodeType } from "@core/types";
|
||||||
|
import { deductCredits, hasCredits } from "~/trigger/utils/utils";
|
||||||
|
import { assignEpisodesToSpace } from "~/services/graphModels/space";
|
||||||
|
import {
|
||||||
|
shouldTriggerTopicAnalysis,
|
||||||
|
updateLastTopicAnalysisTime,
|
||||||
|
} from "~/services/bertTopicAnalysis.server";
|
||||||
|
|
||||||
|
export const IngestBodyRequest = z.object({
|
||||||
|
episodeBody: z.string(),
|
||||||
|
referenceTime: z.string(),
|
||||||
|
metadata: z.record(z.union([z.string(), z.number(), z.boolean()])).optional(),
|
||||||
|
source: z.string(),
|
||||||
|
spaceIds: z.array(z.string()).optional(),
|
||||||
|
sessionId: z.string().optional(),
|
||||||
|
type: z
|
||||||
|
.enum([EpisodeType.CONVERSATION, EpisodeType.DOCUMENT])
|
||||||
|
.default(EpisodeType.CONVERSATION),
|
||||||
|
});
|
||||||
|
|
||||||
|
export interface IngestEpisodePayload {
|
||||||
|
body: z.infer<typeof IngestBodyRequest>;
|
||||||
|
userId: string;
|
||||||
|
workspaceId: string;
|
||||||
|
queueId: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IngestEpisodeResult {
|
||||||
|
success: boolean;
|
||||||
|
episodeDetails?: any;
|
||||||
|
error?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Core business logic for ingesting a single episode
|
||||||
|
* This is shared between Trigger.dev and BullMQ implementations
|
||||||
|
*
|
||||||
|
* Note: This function should NOT call trigger functions directly.
|
||||||
|
* Instead, return data that indicates follow-up jobs are needed,
|
||||||
|
* and let the caller (Trigger task or BullMQ worker) handle job queueing.
|
||||||
|
*/
|
||||||
|
export async function processEpisodeIngestion(
|
||||||
|
payload: IngestEpisodePayload,
|
||||||
|
// Callback functions for enqueueing follow-up jobs
|
||||||
|
enqueueSpaceAssignment?: (params: {
|
||||||
|
userId: string;
|
||||||
|
workspaceId: string;
|
||||||
|
mode: "episode";
|
||||||
|
episodeIds: string[];
|
||||||
|
}) => Promise<any>,
|
||||||
|
enqueueSessionCompaction?: (params: {
|
||||||
|
userId: string;
|
||||||
|
sessionId: string;
|
||||||
|
source: string;
|
||||||
|
}) => Promise<any>,
|
||||||
|
enqueueBertTopicAnalysis?: (params: {
|
||||||
|
userId: string;
|
||||||
|
workspaceId: string;
|
||||||
|
minTopicSize?: number;
|
||||||
|
nrTopics?: number;
|
||||||
|
}) => Promise<any>,
|
||||||
|
): Promise<IngestEpisodeResult> {
|
||||||
|
try {
|
||||||
|
logger.log(`Processing job for user ${payload.userId}`);
|
||||||
|
|
||||||
|
// Check if workspace has sufficient credits before processing
|
||||||
|
const hasSufficientCredits = await hasCredits(
|
||||||
|
payload.workspaceId,
|
||||||
|
"addEpisode",
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!hasSufficientCredits) {
|
||||||
|
logger.warn(`Insufficient credits for workspace ${payload.workspaceId}`);
|
||||||
|
|
||||||
|
await prisma.ingestionQueue.update({
|
||||||
|
where: { id: payload.queueId },
|
||||||
|
data: {
|
||||||
|
status: IngestionStatus.NO_CREDITS,
|
||||||
|
error:
|
||||||
|
"Insufficient credits. Please upgrade your plan or wait for your credits to reset.",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: "Insufficient credits",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const ingestionQueue = await prisma.ingestionQueue.update({
|
||||||
|
where: { id: payload.queueId },
|
||||||
|
data: {
|
||||||
|
status: IngestionStatus.PROCESSING,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const knowledgeGraphService = new KnowledgeGraphService();
|
||||||
|
|
||||||
|
const episodeBody = payload.body as any;
|
||||||
|
|
||||||
|
const episodeDetails = await knowledgeGraphService.addEpisode(
|
||||||
|
{
|
||||||
|
...episodeBody,
|
||||||
|
userId: payload.userId,
|
||||||
|
},
|
||||||
|
prisma,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Link episode to document if it's a document chunk
|
||||||
|
if (
|
||||||
|
episodeBody.type === EpisodeType.DOCUMENT &&
|
||||||
|
episodeBody.metadata.documentUuid &&
|
||||||
|
episodeDetails.episodeUuid
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
await linkEpisodeToDocument(
|
||||||
|
episodeDetails.episodeUuid,
|
||||||
|
episodeBody.metadata.documentUuid,
|
||||||
|
episodeBody.metadata.chunkIndex || 0,
|
||||||
|
);
|
||||||
|
logger.log(
|
||||||
|
`Linked episode ${episodeDetails.episodeUuid} to document ${episodeBody.metadata.documentUuid} at chunk ${episodeBody.metadata.chunkIndex || 0}`,
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to link episode to document:`, {
|
||||||
|
error,
|
||||||
|
episodeUuid: episodeDetails.episodeUuid,
|
||||||
|
documentUuid: episodeBody.metadata.documentUuid,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let finalOutput = episodeDetails;
|
||||||
|
let episodeUuids: string[] = episodeDetails.episodeUuid
|
||||||
|
? [episodeDetails.episodeUuid]
|
||||||
|
: [];
|
||||||
|
let currentStatus: IngestionStatus = IngestionStatus.COMPLETED;
|
||||||
|
if (episodeBody.type === EpisodeType.DOCUMENT) {
|
||||||
|
const currentOutput = ingestionQueue.output as any;
|
||||||
|
currentOutput.episodes.push(episodeDetails);
|
||||||
|
episodeUuids = currentOutput.episodes.map(
|
||||||
|
(episode: any) => episode.episodeUuid,
|
||||||
|
);
|
||||||
|
|
||||||
|
finalOutput = {
|
||||||
|
...currentOutput,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (currentOutput.episodes.length !== currentOutput.totalChunks) {
|
||||||
|
currentStatus = IngestionStatus.PROCESSING;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
await prisma.ingestionQueue.update({
|
||||||
|
where: { id: payload.queueId },
|
||||||
|
data: {
|
||||||
|
output: finalOutput,
|
||||||
|
status: currentStatus,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Deduct credits for episode creation
|
||||||
|
if (currentStatus === IngestionStatus.COMPLETED) {
|
||||||
|
await deductCredits(
|
||||||
|
payload.workspaceId,
|
||||||
|
"addEpisode",
|
||||||
|
finalOutput.statementsCreated,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle space assignment after successful ingestion
|
||||||
|
try {
|
||||||
|
// If spaceIds were explicitly provided, immediately assign the episode to those spaces
|
||||||
|
if (
|
||||||
|
episodeBody.spaceIds &&
|
||||||
|
episodeBody.spaceIds.length > 0 &&
|
||||||
|
episodeDetails.episodeUuid
|
||||||
|
) {
|
||||||
|
logger.info(`Assigning episode to explicitly provided spaces`, {
|
||||||
|
userId: payload.userId,
|
||||||
|
episodeId: episodeDetails.episodeUuid,
|
||||||
|
spaceIds: episodeBody.spaceIds,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Assign episode to each space
|
||||||
|
for (const spaceId of episodeBody.spaceIds) {
|
||||||
|
await assignEpisodesToSpace(
|
||||||
|
[episodeDetails.episodeUuid],
|
||||||
|
spaceId,
|
||||||
|
payload.userId,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
`Skipping LLM space assignment - episode explicitly assigned to ${episodeBody.spaceIds.length} space(s)`,
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
// Only trigger automatic LLM space assignment if no explicit spaceIds were provided
|
||||||
|
logger.info(
|
||||||
|
`Triggering LLM space assignment after successful ingestion`,
|
||||||
|
{
|
||||||
|
userId: payload.userId,
|
||||||
|
workspaceId: payload.workspaceId,
|
||||||
|
episodeId: episodeDetails?.episodeUuid,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
if (
|
||||||
|
episodeDetails.episodeUuid &&
|
||||||
|
currentStatus === IngestionStatus.COMPLETED &&
|
||||||
|
enqueueSpaceAssignment
|
||||||
|
) {
|
||||||
|
await enqueueSpaceAssignment({
|
||||||
|
userId: payload.userId,
|
||||||
|
workspaceId: payload.workspaceId,
|
||||||
|
mode: "episode",
|
||||||
|
episodeIds: episodeUuids,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (assignmentError) {
|
||||||
|
// Don't fail the ingestion if assignment fails
|
||||||
|
logger.warn(`Failed to trigger space assignment after ingestion:`, {
|
||||||
|
error: assignmentError,
|
||||||
|
userId: payload.userId,
|
||||||
|
episodeId: episodeDetails?.episodeUuid,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Auto-trigger session compaction if episode has sessionId
|
||||||
|
try {
|
||||||
|
if (
|
||||||
|
episodeBody.sessionId &&
|
||||||
|
currentStatus === IngestionStatus.COMPLETED &&
|
||||||
|
enqueueSessionCompaction
|
||||||
|
) {
|
||||||
|
logger.info(`Checking if session compaction should be triggered`, {
|
||||||
|
userId: payload.userId,
|
||||||
|
sessionId: episodeBody.sessionId,
|
||||||
|
source: episodeBody.source,
|
||||||
|
});
|
||||||
|
|
||||||
|
await enqueueSessionCompaction({
|
||||||
|
userId: payload.userId,
|
||||||
|
sessionId: episodeBody.sessionId,
|
||||||
|
source: episodeBody.source,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (compactionError) {
|
||||||
|
// Don't fail the ingestion if compaction fails
|
||||||
|
logger.warn(`Failed to trigger session compaction after ingestion:`, {
|
||||||
|
error: compactionError,
|
||||||
|
userId: payload.userId,
|
||||||
|
sessionId: episodeBody.sessionId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Auto-trigger BERT topic analysis if threshold met (20+ new episodes)
|
||||||
|
try {
|
||||||
|
if (
|
||||||
|
currentStatus === IngestionStatus.COMPLETED &&
|
||||||
|
enqueueBertTopicAnalysis
|
||||||
|
) {
|
||||||
|
const shouldTrigger = await shouldTriggerTopicAnalysis(
|
||||||
|
payload.userId,
|
||||||
|
payload.workspaceId,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (shouldTrigger) {
|
||||||
|
logger.info(
|
||||||
|
`Triggering BERT topic analysis after reaching 20+ new episodes`,
|
||||||
|
{
|
||||||
|
userId: payload.userId,
|
||||||
|
workspaceId: payload.workspaceId,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
await enqueueBertTopicAnalysis({
|
||||||
|
userId: payload.userId,
|
||||||
|
workspaceId: payload.workspaceId,
|
||||||
|
minTopicSize: 10,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Update the last analysis timestamp
|
||||||
|
await updateLastTopicAnalysisTime(payload.workspaceId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (topicAnalysisError) {
|
||||||
|
// Don't fail the ingestion if topic analysis fails
|
||||||
|
logger.warn(`Failed to trigger topic analysis after ingestion:`, {
|
||||||
|
error: topicAnalysisError,
|
||||||
|
userId: payload.userId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return { success: true, episodeDetails };
|
||||||
|
} catch (err: any) {
|
||||||
|
await prisma.ingestionQueue.update({
|
||||||
|
where: { id: payload.queueId },
|
||||||
|
data: {
|
||||||
|
error: err.message,
|
||||||
|
status: IngestionStatus.FAILED,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.error(`Error processing job for user ${payload.userId}:`, err);
|
||||||
|
return { success: false, error: err.message };
|
||||||
|
}
|
||||||
|
}
|
||||||
455
apps/webapp/app/jobs/session/session-compaction.logic.ts
Normal file
455
apps/webapp/app/jobs/session/session-compaction.logic.ts
Normal file
@ -0,0 +1,455 @@
|
|||||||
|
import { logger } from "~/services/logger.service";
|
||||||
|
import type { CoreMessage } from "ai";
|
||||||
|
import { z } from "zod";
|
||||||
|
import { getEmbedding, makeModelCall } from "~/lib/model.server";
|
||||||
|
import {
|
||||||
|
getCompactedSessionBySessionId,
|
||||||
|
linkEpisodesToCompact,
|
||||||
|
getSessionEpisodes,
|
||||||
|
type CompactedSessionNode,
|
||||||
|
type SessionEpisodeData,
|
||||||
|
saveCompactedSession,
|
||||||
|
} from "~/services/graphModels/compactedSession";
|
||||||
|
|
||||||
|
export interface SessionCompactionPayload {
|
||||||
|
userId: string;
|
||||||
|
sessionId: string;
|
||||||
|
source: string;
|
||||||
|
triggerSource?: "auto" | "manual" | "threshold";
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SessionCompactionResult {
|
||||||
|
success: boolean;
|
||||||
|
compactionResult?: {
|
||||||
|
compactUuid: string;
|
||||||
|
sessionId: string;
|
||||||
|
summary: string;
|
||||||
|
episodeCount: number;
|
||||||
|
startTime: Date;
|
||||||
|
endTime: Date;
|
||||||
|
confidence: number;
|
||||||
|
compressionRatio: number;
|
||||||
|
};
|
||||||
|
reason?: string;
|
||||||
|
episodeCount?: number;
|
||||||
|
error?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Zod schema for LLM response validation
|
||||||
|
export const CompactionResultSchema = z.object({
|
||||||
|
summary: z.string().describe("Consolidated narrative of the entire session"),
|
||||||
|
confidence: z
|
||||||
|
.number()
|
||||||
|
.min(0)
|
||||||
|
.max(1)
|
||||||
|
.describe("Confidence score of the compaction quality"),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const CONFIG = {
|
||||||
|
minEpisodesForCompaction: 5, // Minimum episodes to trigger compaction
|
||||||
|
compactionThreshold: 1, // Trigger after N new episodes
|
||||||
|
maxEpisodesPerBatch: 50, // Process in batches if needed
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Core business logic for session compaction
|
||||||
|
* This is shared between Trigger.dev and BullMQ implementations
|
||||||
|
*/
|
||||||
|
export async function processSessionCompaction(
|
||||||
|
payload: SessionCompactionPayload,
|
||||||
|
): Promise<SessionCompactionResult> {
|
||||||
|
const { userId, sessionId, source, triggerSource = "auto" } = payload;
|
||||||
|
|
||||||
|
logger.info(`Starting session compaction`, {
|
||||||
|
userId,
|
||||||
|
sessionId,
|
||||||
|
source,
|
||||||
|
triggerSource,
|
||||||
|
});
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Check if compaction already exists
|
||||||
|
const existingCompact = await getCompactedSessionBySessionId(
|
||||||
|
sessionId,
|
||||||
|
userId,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Fetch all episodes for this session
|
||||||
|
const episodes = await getSessionEpisodes(
|
||||||
|
sessionId,
|
||||||
|
userId,
|
||||||
|
existingCompact?.endTime,
|
||||||
|
);
|
||||||
|
|
||||||
|
console.log("episodes", episodes.length);
|
||||||
|
// Check if we have enough episodes
|
||||||
|
if (!existingCompact && episodes.length < CONFIG.minEpisodesForCompaction) {
|
||||||
|
logger.info(`Not enough episodes for compaction`, {
|
||||||
|
sessionId,
|
||||||
|
episodeCount: episodes.length,
|
||||||
|
minRequired: CONFIG.minEpisodesForCompaction,
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
reason: "insufficient_episodes",
|
||||||
|
episodeCount: episodes.length,
|
||||||
|
};
|
||||||
|
} else if (
|
||||||
|
existingCompact &&
|
||||||
|
episodes.length <
|
||||||
|
CONFIG.minEpisodesForCompaction + CONFIG.compactionThreshold
|
||||||
|
) {
|
||||||
|
logger.info(`Not enough new episodes for compaction`, {
|
||||||
|
sessionId,
|
||||||
|
episodeCount: episodes.length,
|
||||||
|
minRequired:
|
||||||
|
CONFIG.minEpisodesForCompaction + CONFIG.compactionThreshold,
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
reason: "insufficient_new_episodes",
|
||||||
|
episodeCount: episodes.length,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate or update compaction
|
||||||
|
const compactionResult = existingCompact
|
||||||
|
? await updateCompaction(existingCompact, episodes, userId)
|
||||||
|
: await createCompaction(sessionId, episodes, userId, source);
|
||||||
|
|
||||||
|
logger.info(`Session compaction completed`, {
|
||||||
|
sessionId,
|
||||||
|
compactUuid: compactionResult.uuid,
|
||||||
|
episodeCount: compactionResult.episodeCount,
|
||||||
|
compressionRatio: compactionResult.compressionRatio,
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
compactionResult: {
|
||||||
|
compactUuid: compactionResult.uuid,
|
||||||
|
sessionId: compactionResult.sessionId,
|
||||||
|
summary: compactionResult.summary,
|
||||||
|
episodeCount: compactionResult.episodeCount,
|
||||||
|
startTime: compactionResult.startTime,
|
||||||
|
endTime: compactionResult.endTime,
|
||||||
|
confidence: compactionResult.confidence,
|
||||||
|
compressionRatio: compactionResult.compressionRatio,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Session compaction failed`, {
|
||||||
|
sessionId,
|
||||||
|
userId,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create new compaction
|
||||||
|
*/
|
||||||
|
async function createCompaction(
|
||||||
|
sessionId: string,
|
||||||
|
episodes: SessionEpisodeData[],
|
||||||
|
userId: string,
|
||||||
|
source: string,
|
||||||
|
): Promise<CompactedSessionNode> {
|
||||||
|
logger.info(`Creating new compaction`, {
|
||||||
|
sessionId,
|
||||||
|
episodeCount: episodes.length,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Generate compaction using LLM
|
||||||
|
const compactionData = await generateCompaction(episodes, null);
|
||||||
|
|
||||||
|
// Generate embedding for summary
|
||||||
|
const summaryEmbedding = await getEmbedding(compactionData.summary);
|
||||||
|
|
||||||
|
// Create CompactedSession node using graph model
|
||||||
|
const compactUuid = crypto.randomUUID();
|
||||||
|
const now = new Date();
|
||||||
|
const startTime = new Date(episodes[0].createdAt);
|
||||||
|
const endTime = new Date(episodes[episodes.length - 1].createdAt);
|
||||||
|
const episodeUuids = episodes.map((e) => e.uuid);
|
||||||
|
const compressionRatio = episodes.length / 1;
|
||||||
|
|
||||||
|
const compactNode: CompactedSessionNode = {
|
||||||
|
uuid: compactUuid,
|
||||||
|
sessionId,
|
||||||
|
summary: compactionData.summary,
|
||||||
|
summaryEmbedding,
|
||||||
|
episodeCount: episodes.length,
|
||||||
|
startTime,
|
||||||
|
endTime,
|
||||||
|
createdAt: now,
|
||||||
|
confidence: compactionData.confidence,
|
||||||
|
userId,
|
||||||
|
source,
|
||||||
|
compressionRatio,
|
||||||
|
metadata: { triggerType: "create" },
|
||||||
|
};
|
||||||
|
|
||||||
|
console.log("compactNode", compactNode);
|
||||||
|
// Use graph model functions
|
||||||
|
await saveCompactedSession(compactNode);
|
||||||
|
await linkEpisodesToCompact(compactUuid, episodeUuids, userId);
|
||||||
|
|
||||||
|
logger.info(`Compaction created`, {
|
||||||
|
compactUuid,
|
||||||
|
episodeCount: episodes.length,
|
||||||
|
});
|
||||||
|
|
||||||
|
return compactNode;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update existing compaction with new episodes
|
||||||
|
*/
|
||||||
|
async function updateCompaction(
|
||||||
|
existingCompact: CompactedSessionNode,
|
||||||
|
newEpisodes: SessionEpisodeData[],
|
||||||
|
userId: string,
|
||||||
|
): Promise<CompactedSessionNode> {
|
||||||
|
logger.info(`Updating existing compaction`, {
|
||||||
|
compactUuid: existingCompact.uuid,
|
||||||
|
newEpisodeCount: newEpisodes.length,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Generate updated compaction using LLM (merging)
|
||||||
|
const compactionData = await generateCompaction(
|
||||||
|
newEpisodes,
|
||||||
|
existingCompact.summary,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Generate new embedding for updated summary
|
||||||
|
const summaryEmbedding = await getEmbedding(compactionData.summary);
|
||||||
|
|
||||||
|
// Update CompactedSession node using graph model
|
||||||
|
const now = new Date();
|
||||||
|
const endTime = newEpisodes[newEpisodes.length - 1].createdAt;
|
||||||
|
const totalEpisodeCount = existingCompact.episodeCount + newEpisodes.length;
|
||||||
|
const compressionRatio = totalEpisodeCount / 1;
|
||||||
|
const episodeUuids = newEpisodes.map((e) => e.uuid);
|
||||||
|
|
||||||
|
const updatedNode: CompactedSessionNode = {
|
||||||
|
...existingCompact,
|
||||||
|
summary: compactionData.summary,
|
||||||
|
summaryEmbedding,
|
||||||
|
episodeCount: totalEpisodeCount,
|
||||||
|
endTime,
|
||||||
|
updatedAt: now,
|
||||||
|
confidence: compactionData.confidence,
|
||||||
|
compressionRatio,
|
||||||
|
metadata: { triggerType: "update", newEpisodesAdded: newEpisodes.length },
|
||||||
|
};
|
||||||
|
|
||||||
|
// Use graph model functions
|
||||||
|
await saveCompactedSession(updatedNode);
|
||||||
|
await linkEpisodesToCompact(existingCompact.uuid, episodeUuids, userId);
|
||||||
|
|
||||||
|
logger.info(`Compaction updated`, {
|
||||||
|
compactUuid: existingCompact.uuid,
|
||||||
|
totalEpisodeCount,
|
||||||
|
});
|
||||||
|
|
||||||
|
return updatedNode;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate compaction using LLM (similar to Claude Code's compact approach)
|
||||||
|
*/
|
||||||
|
async function generateCompaction(
|
||||||
|
episodes: SessionEpisodeData[],
|
||||||
|
existingSummary: string | null,
|
||||||
|
): Promise<z.infer<typeof CompactionResultSchema>> {
|
||||||
|
const systemPrompt = createCompactionSystemPrompt();
|
||||||
|
const userPrompt = createCompactionUserPrompt(episodes, existingSummary);
|
||||||
|
|
||||||
|
const messages: CoreMessage[] = [
|
||||||
|
{ role: "system", content: systemPrompt },
|
||||||
|
{ role: "user", content: userPrompt },
|
||||||
|
];
|
||||||
|
|
||||||
|
logger.info(`Generating compaction with LLM`, {
|
||||||
|
episodeCount: episodes.length,
|
||||||
|
hasExistingSummary: !!existingSummary,
|
||||||
|
});
|
||||||
|
|
||||||
|
try {
|
||||||
|
let responseText = "";
|
||||||
|
await makeModelCall(
|
||||||
|
false,
|
||||||
|
messages,
|
||||||
|
(text: string) => {
|
||||||
|
responseText = text;
|
||||||
|
},
|
||||||
|
undefined,
|
||||||
|
"high",
|
||||||
|
);
|
||||||
|
|
||||||
|
return parseCompactionResponse(responseText);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to generate compaction`, {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
});
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* System prompt for compaction (for agent recall/context retrieval)
|
||||||
|
*/
|
||||||
|
function createCompactionSystemPrompt(): string {
|
||||||
|
return `You are a session compaction specialist. Your task is to create a rich, informative summary that will help AI agents understand what happened in this conversation session when they need context for future interactions.
|
||||||
|
|
||||||
|
## PURPOSE
|
||||||
|
|
||||||
|
This summary will be retrieved by AI agents when the user references this session in future conversations. The agent needs enough context to:
|
||||||
|
- Understand what was discussed and why
|
||||||
|
- Know what decisions were made and their rationale
|
||||||
|
- Grasp the outcome and current state
|
||||||
|
- Have relevant technical details to provide informed responses
|
||||||
|
|
||||||
|
## COMPACTION GOALS
|
||||||
|
|
||||||
|
1. **Comprehensive Context**: Capture all important information that might be referenced later
|
||||||
|
2. **Decision Documentation**: Clearly state what was decided, why, and what alternatives were considered
|
||||||
|
3. **Technical Details**: Include specific implementations, tools, configurations, and technical choices
|
||||||
|
4. **Outcome Clarity**: Make it clear what was accomplished and what the final state is
|
||||||
|
5. **Evolution Tracking**: Show how thinking or decisions evolved during the session
|
||||||
|
|
||||||
|
## COMPACTION RULES
|
||||||
|
|
||||||
|
1. **Be Information-Dense**: Pack useful details without fluff or repetition
|
||||||
|
2. **Structure Chronologically**: Start with problem/question, show progression, end with outcome
|
||||||
|
3. **Highlight Key Points**: Emphasize decisions, implementations, results, and learnings
|
||||||
|
4. **Include Specifics**: Names of libraries, specific configurations, metrics, numbers matter
|
||||||
|
5. **Resolve Contradictions**: Always use the most recent/final version when information conflicts
|
||||||
|
|
||||||
|
## OUTPUT REQUIREMENTS
|
||||||
|
|
||||||
|
- **summary**: A detailed, information-rich narrative that tells the complete story
|
||||||
|
- Structure naturally based on content - use as many paragraphs as needed
|
||||||
|
- Each distinct topic, decision, or phase should get its own paragraph(s)
|
||||||
|
- Start with context and initial problem/question
|
||||||
|
- Progress chronologically through discussions, decisions, and implementations
|
||||||
|
- **Final paragraph MUST**: State the outcome, results, and current state
|
||||||
|
- Don't artificially limit length - capture everything important
|
||||||
|
|
||||||
|
- **confidence**: Score (0-1) reflecting how well this summary captures the session's essence
|
||||||
|
|
||||||
|
Your response MUST be valid JSON wrapped in <output></output> tags.
|
||||||
|
|
||||||
|
## KEY PRINCIPLES
|
||||||
|
|
||||||
|
- Write for an AI agent that needs to help the user in future conversations
|
||||||
|
- Include technical specifics that might be referenced (library names, configurations, metrics)
|
||||||
|
- Make outcomes and current state crystal clear in the final paragraph
|
||||||
|
- Show the reasoning behind decisions, not just the decisions themselves
|
||||||
|
- Be comprehensive but concise - every sentence should add value
|
||||||
|
- Each major topic or phase deserves its own paragraph(s)
|
||||||
|
- Don't compress too much - agents need the details
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* User prompt for compaction
|
||||||
|
*/
|
||||||
|
function createCompactionUserPrompt(
|
||||||
|
episodes: SessionEpisodeData[],
|
||||||
|
existingSummary: string | null,
|
||||||
|
): string {
|
||||||
|
let prompt = "";
|
||||||
|
|
||||||
|
if (existingSummary) {
|
||||||
|
prompt += `## EXISTING SUMMARY (from previous compaction)\n\n${existingSummary}\n\n`;
|
||||||
|
prompt += `## NEW EPISODES (to merge into existing summary)\n\n`;
|
||||||
|
} else {
|
||||||
|
prompt += `## SESSION EPISODES (to compact)\n\n`;
|
||||||
|
}
|
||||||
|
|
||||||
|
episodes.forEach((episode, index) => {
|
||||||
|
const timestamp = new Date(episode.validAt).toISOString();
|
||||||
|
prompt += `### Episode ${index + 1} (${timestamp})\n`;
|
||||||
|
prompt += `Source: ${episode.source}\n`;
|
||||||
|
prompt += `Content:\n${episode.originalContent}\n\n`;
|
||||||
|
});
|
||||||
|
|
||||||
|
if (existingSummary) {
|
||||||
|
prompt += `\n## INSTRUCTIONS\n\n`;
|
||||||
|
prompt += `Merge the new episodes into the existing summary. Update facts, add new information, and maintain narrative coherence. Ensure the consolidated summary reflects the complete session including both old and new content.\n`;
|
||||||
|
} else {
|
||||||
|
prompt += `\n## INSTRUCTIONS\n\n`;
|
||||||
|
prompt += `Create a compact summary of this entire session. Consolidate all information into a coherent narrative with deduplicated key facts.\n`;
|
||||||
|
}
|
||||||
|
|
||||||
|
return prompt;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse LLM response for compaction
|
||||||
|
*/
|
||||||
|
function parseCompactionResponse(
|
||||||
|
response: string,
|
||||||
|
): z.infer<typeof CompactionResultSchema> {
|
||||||
|
try {
|
||||||
|
// Extract content from <output> tags
|
||||||
|
const outputMatch = response.match(/<output>([\s\S]*?)<\/output>/);
|
||||||
|
if (!outputMatch) {
|
||||||
|
logger.warn("No <output> tags found in LLM compaction response");
|
||||||
|
logger.debug("Full LLM response:", { response });
|
||||||
|
throw new Error("Invalid LLM response format - missing <output> tags");
|
||||||
|
}
|
||||||
|
|
||||||
|
let jsonContent = outputMatch[1].trim();
|
||||||
|
|
||||||
|
// Remove markdown code blocks if present
|
||||||
|
jsonContent = jsonContent.replace(/```json\n?/g, "").replace(/```\n?/g, "");
|
||||||
|
|
||||||
|
const parsed = JSON.parse(jsonContent);
|
||||||
|
|
||||||
|
// Validate with schema
|
||||||
|
const validated = CompactionResultSchema.parse(parsed);
|
||||||
|
|
||||||
|
return validated;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error("Failed to parse compaction response", {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
response: response.substring(0, 500),
|
||||||
|
});
|
||||||
|
throw new Error(`Failed to parse compaction response: ${error}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper function to check if compaction should be triggered
|
||||||
|
*/
|
||||||
|
export async function shouldTriggerCompaction(
|
||||||
|
sessionId: string,
|
||||||
|
userId: string,
|
||||||
|
): Promise<boolean> {
|
||||||
|
const existingCompact = await getCompactedSessionBySessionId(
|
||||||
|
sessionId,
|
||||||
|
userId,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!existingCompact) {
|
||||||
|
// Check if we have enough episodes for initial compaction
|
||||||
|
const episodes = await getSessionEpisodes(sessionId, userId);
|
||||||
|
return episodes.length >= CONFIG.minEpisodesForCompaction;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if we have enough new episodes to update
|
||||||
|
const newEpisodes = await getSessionEpisodes(
|
||||||
|
sessionId,
|
||||||
|
userId,
|
||||||
|
existingCompact.endTime,
|
||||||
|
);
|
||||||
|
return newEpisodes.length >= CONFIG.compactionThreshold;
|
||||||
|
}
|
||||||
1201
apps/webapp/app/jobs/spaces/space-assignment.logic.ts
Normal file
1201
apps/webapp/app/jobs/spaces/space-assignment.logic.ts
Normal file
File diff suppressed because it is too large
Load Diff
229
apps/webapp/app/jobs/spaces/space-identification.logic.ts
Normal file
229
apps/webapp/app/jobs/spaces/space-identification.logic.ts
Normal file
@ -0,0 +1,229 @@
|
|||||||
|
/**
|
||||||
|
* Space Identification Logic
|
||||||
|
*
|
||||||
|
* Uses LLM to identify appropriate spaces for topics discovered by BERT analysis
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { makeModelCall } from "~/lib/model.server";
|
||||||
|
import { getAllSpacesForUser } from "~/services/graphModels/space";
|
||||||
|
import { getEpisode } from "~/services/graphModels/episode";
|
||||||
|
import { logger } from "~/services/logger.service";
|
||||||
|
import type { SpaceNode } from "@core/types";
|
||||||
|
|
||||||
|
export interface TopicData {
|
||||||
|
keywords: string[];
|
||||||
|
episodeIds: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SpaceProposal {
|
||||||
|
name: string;
|
||||||
|
intent: string;
|
||||||
|
confidence: number;
|
||||||
|
reason: string;
|
||||||
|
topics: string[]; // Array of topic IDs
|
||||||
|
}
|
||||||
|
|
||||||
|
interface IdentifySpacesParams {
|
||||||
|
userId: string;
|
||||||
|
topics: Record<string, TopicData>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Identify spaces for topics using LLM analysis
|
||||||
|
* Takes top 10 keywords and top 5 episodes per topic
|
||||||
|
*/
|
||||||
|
export async function identifySpacesForTopics(
|
||||||
|
params: IdentifySpacesParams,
|
||||||
|
): Promise<SpaceProposal[]> {
|
||||||
|
const { userId, topics } = params;
|
||||||
|
|
||||||
|
// Get existing spaces for the user
|
||||||
|
const existingSpaces = await getAllSpacesForUser(userId);
|
||||||
|
|
||||||
|
// Prepare topic data with top 10 keywords and top 5 episodes
|
||||||
|
const topicsForAnalysis = await Promise.all(
|
||||||
|
Object.entries(topics).map(async ([topicId, topicData]) => {
|
||||||
|
// Take top 10 keywords
|
||||||
|
const topKeywords = topicData.keywords.slice(0, 10);
|
||||||
|
|
||||||
|
// Take top 5 episodes and fetch their content
|
||||||
|
const topEpisodeIds = topicData.episodeIds.slice(0, 5);
|
||||||
|
const episodes = await Promise.all(
|
||||||
|
topEpisodeIds.map((id) => getEpisode(id)),
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
topicId,
|
||||||
|
keywords: topKeywords,
|
||||||
|
episodes: episodes
|
||||||
|
.filter((e) => e !== null)
|
||||||
|
.map((e) => ({
|
||||||
|
content: e!.content.substring(0, 500), // Limit to 500 chars per episode
|
||||||
|
})),
|
||||||
|
episodeCount: topicData.episodeIds.length,
|
||||||
|
};
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Build the prompt
|
||||||
|
const prompt = buildSpaceIdentificationPrompt(
|
||||||
|
existingSpaces,
|
||||||
|
topicsForAnalysis,
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.info("Identifying spaces for topics", {
|
||||||
|
userId,
|
||||||
|
topicCount: Object.keys(topics).length,
|
||||||
|
existingSpaceCount: existingSpaces.length,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Call LLM with structured output
|
||||||
|
let responseText = "";
|
||||||
|
await makeModelCall(
|
||||||
|
false, // not streaming
|
||||||
|
[{ role: "user", content: prompt }],
|
||||||
|
(text) => {
|
||||||
|
responseText = text;
|
||||||
|
},
|
||||||
|
{
|
||||||
|
temperature: 0.7,
|
||||||
|
},
|
||||||
|
"high", // Use high complexity for space identification
|
||||||
|
);
|
||||||
|
|
||||||
|
// Parse the response
|
||||||
|
const proposals = parseSpaceProposals(responseText);
|
||||||
|
|
||||||
|
logger.info("Space identification completed", {
|
||||||
|
userId,
|
||||||
|
proposalCount: proposals.length,
|
||||||
|
});
|
||||||
|
|
||||||
|
return proposals;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Build the prompt for space identification.
 *
 * Assembles three sections: the user's existing spaces (or a note that none
 * exist), the discovered topics with keywords and episode samples, and the
 * task/output-format instructions that request a JSON array of proposals.
 *
 * @param existingSpaces Spaces the user already has; their exact names are
 *                       surfaced so the LLM can assign topics to them.
 * @param topics         Condensed topic data (top keywords + episode samples).
 * @returns The full prompt string for a single user message.
 */
function buildSpaceIdentificationPrompt(
  existingSpaces: SpaceNode[],
  topics: Array<{
    topicId: string;
    keywords: string[];
    episodes: Array<{ content: string }>;
    episodeCount: number;
  }>,
): string {
  // Describe existing spaces so proposals can reuse them; otherwise state a
  // fresh start so the LLM knows every proposal will be a new space.
  const existingSpacesSection =
    existingSpaces.length > 0
      ? `## Existing Spaces

The user currently has these spaces:
${existingSpaces.map((s) => `- **${s.name}**: ${s.description || "No description"} (${s.contextCount || 0} episodes)`).join("\n")}

When identifying new spaces, consider if topics fit into existing spaces or if new spaces are needed.`
      : `## Existing Spaces

The user currently has no spaces defined. This is a fresh start for space organization.`;

  // One markdown subsection per topic: keyword list plus numbered samples.
  const topicsSection = `## Topics Discovered

BERT topic modeling has identified ${topics.length} distinct topics from the user's episodes. Each topic represents a cluster of semantically related content.

${topics
  .map(
    (t, idx) => `### Topic ${idx + 1} (ID: ${t.topicId})
**Episode Count**: ${t.episodeCount}
**Top Keywords**: ${t.keywords.join(", ")}

**Sample Episodes** (showing ${t.episodes.length} of ${t.episodeCount}):
${t.episodes.map((e, i) => `${i + 1}. ${e.content}`).join("\n")}
`,
  )
  .join("\n")}`;

  return `You are a knowledge organization expert. Your task is to analyze discovered topics and identify appropriate "spaces" (thematic containers) for organizing episodic memories.

${existingSpacesSection}

${topicsSection}

## Task

Analyze the topics above and identify spaces that would help organize this content meaningfully. For each space:

1. **Consider existing spaces first**: If topics clearly belong to existing spaces, assign them there
2. **Create new spaces when needed**: If topics represent distinct themes not covered by existing spaces
3. **Group related topics**: Multiple topics can be assigned to the same space if they share a theme
4. **Aim for 20-50 episodes per space**: This is the sweet spot for space cohesion
5. **Focus on user intent**: What would help the user find and understand this content later?

## Output Format

Return your analysis as a JSON array of space proposals. Each proposal should have:

\`\`\`json
[
  {
    "name": "Space name (use existing space name if assigning to existing space)",
    "intent": "Clear description of what this space represents",
    "confidence": 0.85,
    "reason": "Brief explanation of why these topics belong together",
    "topics": ["topic-id-1", "topic-id-2"]
  }
]
\`\`\`

**Important Guidelines**:
- **confidence**: 0.0-1.0 scale indicating how confident you are this is a good grouping
- **topics**: Array of topic IDs (use the exact IDs from above like "0", "1", "-1", etc.)
- **name**: For existing spaces, use the EXACT name. For new spaces, create a clear, concise name
- Only propose spaces with confidence >= 0.6
- Each topic should only appear in ONE space proposal
- Topic "-1" is the outlier topic (noise) - only include if it genuinely fits a theme

Return ONLY the JSON array, no additional text.`;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse space proposals from LLM response
|
||||||
|
*/
|
||||||
|
function parseSpaceProposals(responseText: string): SpaceProposal[] {
|
||||||
|
try {
|
||||||
|
// Extract JSON from markdown code blocks if present
|
||||||
|
const jsonMatch = responseText.match(/```(?:json)?\s*(\[[\s\S]*?\])\s*```/);
|
||||||
|
const jsonText = jsonMatch ? jsonMatch[1] : responseText;
|
||||||
|
|
||||||
|
const proposals = JSON.parse(jsonText.trim());
|
||||||
|
|
||||||
|
if (!Array.isArray(proposals)) {
|
||||||
|
throw new Error("Response is not an array");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate and filter proposals
|
||||||
|
return proposals
|
||||||
|
.filter((p) => {
|
||||||
|
return (
|
||||||
|
p.name &&
|
||||||
|
p.intent &&
|
||||||
|
typeof p.confidence === "number" &&
|
||||||
|
p.confidence >= 0.6 &&
|
||||||
|
Array.isArray(p.topics) &&
|
||||||
|
p.topics.length > 0
|
||||||
|
);
|
||||||
|
})
|
||||||
|
.map((p) => ({
|
||||||
|
name: p.name.trim(),
|
||||||
|
intent: p.intent.trim(),
|
||||||
|
confidence: p.confidence,
|
||||||
|
reason: (p.reason || "").trim(),
|
||||||
|
topics: p.topics.map((t: any) => String(t)),
|
||||||
|
}));
|
||||||
|
} catch (error) {
|
||||||
|
logger.error("Failed to parse space proposals", {
|
||||||
|
error,
|
||||||
|
responseText: responseText.substring(0, 500),
|
||||||
|
});
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
||||||
721
apps/webapp/app/jobs/spaces/space-summary.logic.ts
Normal file
721
apps/webapp/app/jobs/spaces/space-summary.logic.ts
Normal file
@ -0,0 +1,721 @@
|
|||||||
|
import { logger } from "~/services/logger.service";
|
||||||
|
import { SpaceService } from "~/services/space.server";
|
||||||
|
import { makeModelCall } from "~/lib/model.server";
|
||||||
|
import { runQuery } from "~/lib/neo4j.server";
|
||||||
|
import { updateSpaceStatus, SPACE_STATUS } from "~/trigger/utils/space-status";
|
||||||
|
import type { CoreMessage } from "ai";
|
||||||
|
import { z } from "zod";
|
||||||
|
import { getSpace, updateSpace } from "~/trigger/utils/space-utils";
|
||||||
|
import { getSpaceEpisodeCount } from "~/services/graphModels/space";
|
||||||
|
|
||||||
|
/** Payload for one space-summary job run. */
export interface SpaceSummaryPayload {
  userId: string;
  spaceId: string; // Single space only
  // What initiated the run. "manual" bypasses the new-episode threshold
  // check in generateSpaceSummary; defaults to "manual" when omitted.
  triggerSource?: "assignment" | "manual" | "scheduled";
}
|
||||||
|
|
||||||
|
/** Episode fields loaded from Neo4j for summarization (see getSpaceEpisodes). */
interface SpaceEpisodeData {
  uuid: string;
  content: string;
  originalContent: string;
  source: string;
  createdAt: Date;
  validAt: Date;
  // Parsed from the episode's JSON `metadata` string; shape is not fixed here.
  metadata: any;
  sessionId: string | null;
}
|
||||||
|
|
||||||
|
/** Result of generating a summary for one space; persisted by storeSummary. */
interface SpaceSummaryData {
  spaceId: string;
  spaceName: string;
  spaceDescription?: string;
  // Episode count in the space at the time the summary was generated.
  contextCount: number;
  // HTML summary text produced by the LLM.
  summary: string;
  keyEntities: string[];
  themes: string[];
  // LLM self-reported confidence, 0.0-1.0.
  confidence: number;
  lastUpdated: Date;
  // True when this run updated an existing summary rather than creating one.
  isIncremental: boolean;
}
|
||||||
|
|
||||||
|
// Zod schema for LLM response validation: the JSON object extracted from the
// <output> tags must match this shape (see parseSummaryResponse).
const SummaryResultSchema = z.object({
  summary: z.string(),
  keyEntities: z.array(z.string()),
  themes: z.array(z.string()),
  // Confidence is constrained to [0, 1].
  confidence: z.number().min(0).max(1),
});
|
||||||
|
|
||||||
|
// Tuning knobs for summary generation.
const CONFIG = {
  maxEpisodesForSummary: 20, // Limit episodes for performance (larger spaces are batched)
  minEpisodesForSummary: 1, // Minimum episodes to generate summary
  summaryEpisodeThreshold: 5, // Minimum new episodes required to trigger summary (configurable)
};
|
||||||
|
|
||||||
|
/** Outcome reported by processSpaceSummary to the job runner. */
export interface SpaceSummaryResult {
  // True on a clean run, including the "nothing to summarize" case.
  success: boolean;
  spaceId: string;
  triggerSource: string;
  // Stats of the generated summary; null when no summary was produced.
  summary?: {
    statementCount: number;
    confidence: number;
    themesCount: number;
  } | null;
  // Human-readable explanation when summary is null.
  reason?: string;
}
|
||||||
|
|
||||||
|
/**
 * Core business logic for space summary generation.
 * This is shared between Trigger.dev and BullMQ implementations.
 *
 * Flow: mark the space PROCESSING, generate (and store) the summary, then
 * mark it READY. A null summary result is treated as "nothing to do", not an
 * error. On exception the space is marked ERROR (best-effort) and the error
 * is rethrown so the job runner can record the failure.
 *
 * @param payload Space/user identifiers plus the trigger source
 *                (defaults to "manual").
 */
export async function processSpaceSummary(
  payload: SpaceSummaryPayload,
): Promise<SpaceSummaryResult> {
  const { userId, spaceId, triggerSource = "manual" } = payload;

  logger.info(`Starting space summary generation`, {
    userId,
    spaceId,
    triggerSource,
  });

  try {
    // Update status to processing
    await updateSpaceStatus(spaceId, SPACE_STATUS.PROCESSING, {
      userId,
      operation: "space-summary",
      metadata: { triggerSource, phase: "start_summary" },
    });

    // Generate summary for the single space
    const summaryResult = await generateSpaceSummary(
      spaceId,
      userId,
      triggerSource,
    );

    if (summaryResult) {
      // Store the summary
      await storeSummary(summaryResult);

      // Update status to ready after successful completion
      await updateSpaceStatus(spaceId, SPACE_STATUS.READY, {
        userId,
        operation: "space-summary",
        metadata: {
          triggerSource,
          phase: "completed_summary",
          contextCount: summaryResult.contextCount,
          confidence: summaryResult.confidence,
        },
      });

      logger.info(`Generated summary for space ${spaceId}`, {
        statementCount: summaryResult.contextCount,
        confidence: summaryResult.confidence,
        themes: summaryResult.themes.length,
        triggerSource,
      });

      return {
        success: true,
        spaceId,
        triggerSource,
        summary: {
          statementCount: summaryResult.contextCount,
          confidence: summaryResult.confidence,
          themesCount: summaryResult.themes.length,
        },
      };
    } else {
      // No summary generated - this could be due to insufficient episodes or no new episodes
      // This is not an error state, so update status to ready
      await updateSpaceStatus(spaceId, SPACE_STATUS.READY, {
        userId,
        operation: "space-summary",
        metadata: {
          triggerSource,
          phase: "no_summary_needed",
          reason: "Insufficient episodes or no new episodes to summarize",
        },
      });

      logger.info(
        `No summary generated for space ${spaceId} - insufficient or no new episodes`,
      );
      return {
        success: true,
        spaceId,
        triggerSource,
        summary: null,
        reason: "No episodes to summarize",
      };
    }
  } catch (error) {
    // Update status to error on exception. The status update itself may
    // fail; that is logged but must not mask the original error.
    try {
      await updateSpaceStatus(spaceId, SPACE_STATUS.ERROR, {
        userId,
        operation: "space-summary",
        metadata: {
          triggerSource,
          phase: "exception",
          error: error instanceof Error ? error.message : "Unknown error",
        },
      });
    } catch (statusError) {
      logger.warn(`Failed to update status to error for space ${spaceId}`, {
        statusError,
      });
    }

    logger.error(
      `Error in space summary generation for space ${spaceId}:`,
      error as Record<string, unknown>,
    );
    // Rethrow so the surrounding job infrastructure records the failure.
    throw error;
  }
}
|
||||||
|
|
||||||
|
/**
 * Generate a (possibly incremental) summary for one space.
 *
 * Steps: load the space, apply the new-episode threshold (skipped for manual
 * triggers), load episodes (only those newer than the existing summary when
 * updating incrementally), summarize via the LLM — in batches when the space
 * exceeds CONFIG.maxEpisodesForSummary — and return the assembled
 * SpaceSummaryData. Returns null whenever there is nothing to do or the LLM
 * call fails; errors are caught, logged, and converted to null.
 */
async function generateSpaceSummary(
  spaceId: string,
  userId: string,
  triggerSource?: "assignment" | "manual" | "scheduled",
): Promise<SpaceSummaryData | null> {
  try {
    // 1. Get space details
    const spaceService = new SpaceService();
    const space = await spaceService.getSpace(spaceId, userId);

    if (!space) {
      logger.warn(`Space ${spaceId} not found for user ${userId}`);
      return null;
    }

    // 2. Check episode count threshold (skip for manual triggers)
    if (triggerSource !== "manual") {
      const currentEpisodeCount = await getSpaceEpisodeCount(spaceId, userId);
      const lastSummaryEpisodeCount = space.contextCount || 0;
      const episodeDifference = currentEpisodeCount - lastSummaryEpisodeCount;

      // NOTE(review): the `lastSummaryEpisodeCount !== 0` disjunct makes
      // every space that already has a recorded contextCount skip ALL
      // non-manual regeneration, so the incremental-update path below can
      // only ever run for manual triggers — and the skip log message only
      // mentions the episode difference. Confirm whether this should be
      // `&&` (or dropped) so scheduled/assignment runs can update existing
      // summaries once enough new episodes accumulate.
      if (
        episodeDifference < CONFIG.summaryEpisodeThreshold ||
        lastSummaryEpisodeCount !== 0
      ) {
        logger.info(
          `Skipping summary generation for space ${spaceId}: only ${episodeDifference} new episodes (threshold: ${CONFIG.summaryEpisodeThreshold})`,
          {
            currentEpisodeCount,
            lastSummaryEpisodeCount,
            episodeDifference,
            threshold: CONFIG.summaryEpisodeThreshold,
          },
        );
        return null;
      }

      logger.info(
        `Proceeding with summary generation for space ${spaceId}: ${episodeDifference} new episodes (threshold: ${CONFIG.summaryEpisodeThreshold})`,
        {
          currentEpisodeCount,
          lastSummaryEpisodeCount,
          episodeDifference,
        },
      );
    }

    // 2. Check for existing summary
    const existingSummary = await getExistingSummary(spaceId);
    const isIncremental = existingSummary !== null;

    // 3. Get episodes (all or new ones based on existing summary)
    const episodes = await getSpaceEpisodes(
      spaceId,
      userId,
      isIncremental ? existingSummary?.lastUpdated : undefined,
    );

    // Handle case where no new episodes exist for incremental update
    if (isIncremental && episodes.length === 0) {
      logger.info(
        `No new episodes found for space ${spaceId}, skipping summary update`,
      );
      return null;
    }

    // Check minimum episode requirement for new summaries only
    if (!isIncremental && episodes.length < CONFIG.minEpisodesForSummary) {
      logger.info(
        `Space ${spaceId} has insufficient episodes (${episodes.length}) for new summary`,
      );
      return null;
    }

    // 4. Process episodes using unified approach
    let summaryResult;

    if (episodes.length > CONFIG.maxEpisodesForSummary) {
      logger.info(
        `Large space detected (${episodes.length} episodes). Processing in batches.`,
      );

      // Process in batches, each building on previous result
      const batches: SpaceEpisodeData[][] = [];
      for (let i = 0; i < episodes.length; i += CONFIG.maxEpisodesForSummary) {
        batches.push(episodes.slice(i, i + CONFIG.maxEpisodesForSummary));
      }

      // Each batch folds its episodes into the running summary/themes;
      // confidence is averaged over batches at the end.
      let currentSummary = existingSummary?.summary || null;
      let currentThemes = existingSummary?.themes || [];
      let cumulativeConfidence = 0;

      for (const [batchIndex, batch] of batches.entries()) {
        logger.info(
          `Processing batch ${batchIndex + 1}/${batches.length} with ${batch.length} episodes`,
        );

        const batchResult = await generateUnifiedSummary(
          space.name,
          space.description as string,
          batch,
          currentSummary,
          currentThemes,
        );

        if (batchResult) {
          currentSummary = batchResult.summary;
          currentThemes = batchResult.themes;
          cumulativeConfidence += batchResult.confidence;
        } else {
          // A failed batch is skipped; later batches still build on the
          // last successful summary.
          logger.warn(`Failed to process batch ${batchIndex + 1}`);
        }

        // Small delay between batches
        if (batchIndex < batches.length - 1) {
          await new Promise((resolve) => setTimeout(resolve, 500));
        }
      }

      summaryResult = currentSummary
        ? {
            summary: currentSummary,
            themes: currentThemes,
            confidence: Math.min(cumulativeConfidence / batches.length, 1.0),
          }
        : null;
    } else {
      logger.info(
        `Processing ${episodes.length} episodes with unified approach`,
      );

      // Use unified approach for smaller spaces
      summaryResult = await generateUnifiedSummary(
        space.name,
        space.description as string,
        episodes,
        existingSummary?.summary || null,
        existingSummary?.themes || [],
      );
    }

    if (!summaryResult) {
      logger.warn(`Failed to generate LLM summary for space ${spaceId}`);
      return null;
    }

    // Get the actual current counts from Neo4j
    const currentEpisodeCount = await getSpaceEpisodeCount(spaceId, userId);

    return {
      spaceId: space.uuid,
      spaceName: space.name,
      spaceDescription: space.description as string,
      contextCount: currentEpisodeCount,
      summary: summaryResult.summary,
      keyEntities: summaryResult.keyEntities || [],
      themes: summaryResult.themes,
      confidence: summaryResult.confidence,
      lastUpdated: new Date(),
      isIncremental,
    };
  } catch (error) {
    logger.error(
      `Error generating summary for space ${spaceId}:`,
      error as Record<string, unknown>,
    );
    return null;
  }
}
|
||||||
|
|
||||||
|
async function generateUnifiedSummary(
|
||||||
|
spaceName: string,
|
||||||
|
spaceDescription: string | undefined,
|
||||||
|
episodes: SpaceEpisodeData[],
|
||||||
|
previousSummary: string | null = null,
|
||||||
|
previousThemes: string[] = [],
|
||||||
|
): Promise<{
|
||||||
|
summary: string;
|
||||||
|
themes: string[];
|
||||||
|
confidence: number;
|
||||||
|
keyEntities?: string[];
|
||||||
|
} | null> {
|
||||||
|
try {
|
||||||
|
const prompt = createUnifiedSummaryPrompt(
|
||||||
|
spaceName,
|
||||||
|
spaceDescription,
|
||||||
|
episodes,
|
||||||
|
previousSummary,
|
||||||
|
previousThemes,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Space summary generation requires HIGH complexity (creative synthesis, narrative generation)
|
||||||
|
let responseText = "";
|
||||||
|
await makeModelCall(
|
||||||
|
false,
|
||||||
|
prompt,
|
||||||
|
(text: string) => {
|
||||||
|
responseText = text;
|
||||||
|
},
|
||||||
|
undefined,
|
||||||
|
"high",
|
||||||
|
);
|
||||||
|
|
||||||
|
return parseSummaryResponse(responseText);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(
|
||||||
|
"Error generating unified summary:",
|
||||||
|
error as Record<string, unknown>,
|
||||||
|
);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Build the system + user message pair for space summarization.
 *
 * The system message switches between "create" and "update" instructions
 * based on whether a previous summary exists; the user message carries the
 * space metadata, the episodes, and a crude word-frequency hint.
 *
 * @throws Error when there are neither episodes nor a previous summary —
 *         there would be nothing to summarize.
 */
function createUnifiedSummaryPrompt(
  spaceName: string,
  spaceDescription: string | undefined,
  episodes: SpaceEpisodeData[],
  previousSummary: string | null,
  previousThemes: string[],
): CoreMessage[] {
  // If there are no episodes and no previous summary, we cannot generate a meaningful summary
  if (episodes.length === 0 && previousSummary === null) {
    throw new Error(
      "Cannot generate summary without episodes or existing summary",
    );
  }

  // One bullet per episode, annotated with its source and session.
  const episodesText = episodes
    .map(
      (episode) =>
        `- ${episode.content} (Source: ${episode.source}, Session: ${episode.sessionId || "N/A"})`,
    )
    .join("\n");

  // Extract key entities and themes from episode content
  // (naive word-frequency over words longer than 3 chars — a hint for the
  // LLM, not a real entity extractor).
  const contentWords = episodes
    .map((ep) => ep.content.toLowerCase())
    .join(" ")
    .split(/\s+/)
    .filter((word) => word.length > 3);

  const wordFrequency = new Map<string, number>();
  contentWords.forEach((word) => {
    wordFrequency.set(word, (wordFrequency.get(word) || 0) + 1);
  });

  const topEntities = Array.from(wordFrequency.entries())
    .sort(([, a], [, b]) => b - a)
    .slice(0, 10)
    .map(([word]) => word);

  // Update mode when a previous summary exists; selects instruction variants.
  const isUpdate = previousSummary !== null;

  return [
    {
      role: "system",
      content: `You are an expert at analyzing and summarizing episodes within semantic spaces based on the space's intent and purpose. Your task is to ${isUpdate ? "update an existing summary by integrating new episodes" : "create a comprehensive summary of episodes"}.

CRITICAL RULES:
1. Base your summary ONLY on insights derived from the actual content/episodes provided
2. Use the space's INTENT/PURPOSE (from description) to guide what to summarize and how to organize it
3. Write in a factual, neutral tone - avoid promotional language ("pivotal", "invaluable", "cutting-edge")
4. Be specific and concrete - reference actual content, patterns, and insights found in the episodes
5. If episodes are insufficient for meaningful insights, state that more data is needed

INTENT-DRIVEN SUMMARIZATION:
Your summary should SERVE the space's intended purpose. Examples:
- "Learning React" → Summarize React concepts, patterns, techniques learned
- "Project X Updates" → Summarize progress, decisions, blockers, next steps
- "Health Tracking" → Summarize metrics, trends, observations, insights
- "Guidelines for React" → Extract actionable patterns, best practices, rules
- "Evolution of design thinking" → Track how thinking changed over time, decision points
The intent defines WHY this space exists - organize content to serve that purpose.

INSTRUCTIONS:
${
  isUpdate
    ? `1. Review the existing summary and themes carefully
2. Analyze the new episodes for patterns and insights that align with the space's intent
3. Identify connecting points between existing knowledge and new episodes
4. Update the summary to seamlessly integrate new information while preserving valuable existing insights
5. Evolve themes by adding new ones or refining existing ones based on the space's purpose
6. Organize the summary to serve the space's intended use case`
    : `1. Analyze the semantic content and relationships within the episodes
2. Identify topics/sections that align with the space's INTENT and PURPOSE
3. Create a coherent summary that serves the space's intended use case
4. Organize the summary based on the space's purpose (not generic frequency-based themes)`
}
${isUpdate ? "7" : "5"}. Assess your confidence in the ${isUpdate ? "updated" : ""} summary quality (0.0-1.0)

INTENT-ALIGNED ORGANIZATION:
- Organize sections based on what serves the space's purpose
- Topics don't need minimum episode counts - relevance to intent matters most
- Each section should provide value aligned with the space's intended use
- For "guidelines" spaces: focus on actionable patterns
- For "tracking" spaces: focus on temporal patterns and changes
- For "learning" spaces: focus on concepts and insights gained
- Let the space's intent drive the structure, not rigid rules

${
  isUpdate
    ? `CONNECTION FOCUS:
- Entity relationships that span across batches/time
- Theme evolution and expansion
- Temporal patterns and progressions
- Contradictions or confirmations of existing insights
- New insights that complement existing knowledge`
    : ""
}

RESPONSE FORMAT:
Provide your response inside <output></output> tags with valid JSON. Include both HTML summary and markdown format.

<output>
{
  "summary": "${isUpdate ? "Updated HTML summary that integrates new insights with existing knowledge. Write factually about what the statements reveal - mention specific entities, relationships, and patterns found in the data. Avoid marketing language. Use HTML tags for structure." : "Factual HTML summary based on patterns found in the statements. Report what the data actually shows - specific entities, relationships, frequencies, and concrete insights. Avoid promotional language. Use HTML tags like <p>, <strong>, <ul>, <li> for structure. Keep it concise and evidence-based."}",
  "keyEntities": ["entity1", "entity2", "entity3"],
  "themes": ["${isUpdate ? 'updated_theme1", "new_theme2", "evolved_theme3' : 'theme1", "theme2", "theme3'}"],
  "confidence": 0.85
}
</output>

JSON FORMATTING RULES:
- HTML content in summary field is allowed and encouraged
- Escape quotes within strings as \"
- Escape HTML angle brackets if needed: < and >
- Use proper HTML tags for structure: <p>, <strong>, <em>, <ul>, <li>, <h3>, etc.
- HTML content should be well-formed and semantic

GUIDELINES:
${
  isUpdate
    ? `- Preserve valuable insights from existing summary
- Integrate new information by highlighting connections
- Themes should evolve naturally, don't replace wholesale
- The updated summary should read as a coherent whole
- Make the summary user-friendly and explain what value this space provides`
    : `- Report only what the episodes actually reveal - be specific and concrete
- Cite actual content and patterns found in the episodes
- Avoid generic descriptions that could apply to any space
- Use neutral, factual language - no "comprehensive", "robust", "cutting-edge" etc.
- Themes must be backed by at least 3 supporting episodes with clear evidence
- Better to have fewer, well-supported themes than many weak ones
- Confidence should reflect actual data quality and coverage, not aspirational goals`
}`,
    },
    {
      role: "user",
      content: `SPACE INFORMATION:
Name: "${spaceName}"
Intent/Purpose: ${spaceDescription || "No specific intent provided - organize naturally based on content"}

${
  isUpdate
    ? `EXISTING SUMMARY:
${previousSummary}

EXISTING THEMES:
${previousThemes.join(", ")}

NEW EPISODES TO INTEGRATE (${episodes.length} episodes):`
    : `EPISODES IN THIS SPACE (${episodes.length} episodes):`
}
${episodesText}

${
  episodes.length > 0
    ? `TOP WORDS BY FREQUENCY:
${topEntities.join(", ")}`
    : ""
}

${
  isUpdate
    ? "Please identify connections between the existing summary and new episodes, then update the summary to integrate the new insights coherently. Organize the summary to SERVE the space's intent/purpose. Remember: only summarize insights from the actual episode content."
    : "Please analyze the episodes and provide a comprehensive summary that SERVES the space's intent/purpose. Organize sections based on what would be most valuable for this space's intended use case. If the intent is unclear, organize naturally based on content patterns. Only summarize insights from actual episode content."
}`,
    },
  ];
}
|
||||||
|
|
||||||
|
async function getExistingSummary(spaceId: string): Promise<{
|
||||||
|
summary: string;
|
||||||
|
themes: string[];
|
||||||
|
lastUpdated: Date;
|
||||||
|
contextCount: number;
|
||||||
|
} | null> {
|
||||||
|
try {
|
||||||
|
const existingSummary = await getSpace(spaceId);
|
||||||
|
|
||||||
|
if (existingSummary?.summary) {
|
||||||
|
return {
|
||||||
|
summary: existingSummary.summary,
|
||||||
|
themes: existingSummary.themes,
|
||||||
|
lastUpdated: existingSummary.summaryGeneratedAt || new Date(),
|
||||||
|
contextCount: existingSummary.contextCount || 0,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`Failed to get existing summary for space ${spaceId}:`, {
|
||||||
|
error,
|
||||||
|
});
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getSpaceEpisodes(
|
||||||
|
spaceId: string,
|
||||||
|
userId: string,
|
||||||
|
sinceDate?: Date,
|
||||||
|
): Promise<SpaceEpisodeData[]> {
|
||||||
|
// Query episodes directly using Space-[:HAS_EPISODE]->Episode relationships
|
||||||
|
const params: any = { spaceId, userId };
|
||||||
|
|
||||||
|
let dateCondition = "";
|
||||||
|
if (sinceDate) {
|
||||||
|
dateCondition = "AND e.createdAt > $sinceDate";
|
||||||
|
params.sinceDate = sinceDate.toISOString();
|
||||||
|
}
|
||||||
|
|
||||||
|
const query = `
|
||||||
|
MATCH (space:Space {uuid: $spaceId, userId: $userId})-[:HAS_EPISODE]->(e:Episode {userId: $userId})
|
||||||
|
WHERE e IS NOT NULL ${dateCondition}
|
||||||
|
RETURN DISTINCT e
|
||||||
|
ORDER BY e.createdAt DESC
|
||||||
|
`;
|
||||||
|
|
||||||
|
const result = await runQuery(query, params);
|
||||||
|
|
||||||
|
return result.map((record) => {
|
||||||
|
const episode = record.get("e").properties;
|
||||||
|
return {
|
||||||
|
uuid: episode.uuid,
|
||||||
|
content: episode.content,
|
||||||
|
originalContent: episode.originalContent,
|
||||||
|
source: episode.source,
|
||||||
|
createdAt: new Date(episode.createdAt),
|
||||||
|
validAt: new Date(episode.validAt),
|
||||||
|
metadata: JSON.parse(episode.metadata || "{}"),
|
||||||
|
sessionId: episode.sessionId,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseSummaryResponse(response: string): {
|
||||||
|
summary: string;
|
||||||
|
themes: string[];
|
||||||
|
confidence: number;
|
||||||
|
keyEntities?: string[];
|
||||||
|
} | null {
|
||||||
|
try {
|
||||||
|
// Extract content from <output> tags
|
||||||
|
const outputMatch = response.match(/<output>([\s\S]*?)<\/output>/);
|
||||||
|
if (!outputMatch) {
|
||||||
|
logger.warn("No <output> tags found in LLM summary response");
|
||||||
|
logger.debug("Full LLM response:", { response });
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
let jsonContent = outputMatch[1].trim();
|
||||||
|
|
||||||
|
let parsed;
|
||||||
|
try {
|
||||||
|
parsed = JSON.parse(jsonContent);
|
||||||
|
} catch (jsonError) {
|
||||||
|
logger.warn("JSON parsing failed, attempting cleanup and retry", {
|
||||||
|
originalError: jsonError,
|
||||||
|
jsonContent: jsonContent.substring(0, 500) + "...", // Log first 500 chars
|
||||||
|
});
|
||||||
|
|
||||||
|
// More aggressive cleanup for malformed JSON
|
||||||
|
jsonContent = jsonContent
|
||||||
|
.replace(/([^\\])"/g, '$1\\"') // Escape unescaped quotes
|
||||||
|
.replace(/^"/g, '\\"') // Escape quotes at start
|
||||||
|
.replace(/\\\\"/g, '\\"'); // Fix double-escaped quotes
|
||||||
|
|
||||||
|
parsed = JSON.parse(jsonContent);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate the response structure
|
||||||
|
const validationResult = SummaryResultSchema.safeParse(parsed);
|
||||||
|
if (!validationResult.success) {
|
||||||
|
logger.warn("Invalid LLM summary response format:", {
|
||||||
|
error: validationResult.error,
|
||||||
|
parsedData: parsed,
|
||||||
|
});
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return validationResult.data;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(
|
||||||
|
"Error parsing LLM summary response:",
|
||||||
|
error as Record<string, unknown>,
|
||||||
|
);
|
||||||
|
logger.debug("Failed response content:", { response });
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function storeSummary(summaryData: SpaceSummaryData): Promise<void> {
|
||||||
|
try {
|
||||||
|
// Store in PostgreSQL for API access and persistence
|
||||||
|
await updateSpace(summaryData);
|
||||||
|
|
||||||
|
// Also store in Neo4j for graph-based queries
|
||||||
|
const query = `
|
||||||
|
MATCH (space:Space {uuid: $spaceId})
|
||||||
|
SET space.summary = $summary,
|
||||||
|
space.keyEntities = $keyEntities,
|
||||||
|
space.themes = $themes,
|
||||||
|
space.summaryConfidence = $confidence,
|
||||||
|
space.summaryContextCount = $contextCount,
|
||||||
|
space.summaryLastUpdated = datetime($lastUpdated)
|
||||||
|
RETURN space
|
||||||
|
`;
|
||||||
|
|
||||||
|
await runQuery(query, {
|
||||||
|
spaceId: summaryData.spaceId,
|
||||||
|
summary: summaryData.summary,
|
||||||
|
keyEntities: summaryData.keyEntities,
|
||||||
|
themes: summaryData.themes,
|
||||||
|
confidence: summaryData.confidence,
|
||||||
|
contextCount: summaryData.contextCount,
|
||||||
|
lastUpdated: summaryData.lastUpdated.toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info(`Stored summary for space ${summaryData.spaceId}`, {
|
||||||
|
themes: summaryData.themes.length,
|
||||||
|
keyEntities: summaryData.keyEntities.length,
|
||||||
|
confidence: summaryData.confidence,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(
|
||||||
|
`Error storing summary for space ${summaryData.spaceId}:`,
|
||||||
|
error as Record<string, unknown>,
|
||||||
|
);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -11,6 +11,7 @@ import {
|
|||||||
type BatchResponse,
|
type BatchResponse,
|
||||||
} from "../types";
|
} from "../types";
|
||||||
import { logger } from "~/services/logger.service";
|
import { logger } from "~/services/logger.service";
|
||||||
|
import { getModelForTask } from "~/lib/model.server";
|
||||||
|
|
||||||
export class OpenAIBatchProvider extends BaseBatchProvider {
|
export class OpenAIBatchProvider extends BaseBatchProvider {
|
||||||
providerName = "openai";
|
providerName = "openai";
|
||||||
@ -40,13 +41,14 @@ export class OpenAIBatchProvider extends BaseBatchProvider {
|
|||||||
try {
|
try {
|
||||||
this.validateRequests(params.requests);
|
this.validateRequests(params.requests);
|
||||||
|
|
||||||
|
const model = getModelForTask(params.modelComplexity || 'high');
|
||||||
// Convert requests to OpenAI batch format
|
// Convert requests to OpenAI batch format
|
||||||
const batchRequests = params.requests.map((request, index) => ({
|
const batchRequests = params.requests.map((request, index) => ({
|
||||||
custom_id: request.customId,
|
custom_id: request.customId,
|
||||||
method: "POST" as const,
|
method: "POST" as const,
|
||||||
url: "/v1/chat/completions",
|
url: "/v1/chat/completions",
|
||||||
body: {
|
body: {
|
||||||
model: process.env.MODEL as string,
|
model,
|
||||||
messages: request.systemPrompt
|
messages: request.systemPrompt
|
||||||
? [
|
? [
|
||||||
{ role: "system" as const, content: request.systemPrompt },
|
{ role: "system" as const, content: request.systemPrompt },
|
||||||
|
|||||||
@ -3,6 +3,7 @@ import { z } from "zod";
|
|||||||
|
|
||||||
export type BatchStatus = "pending" | "processing" | "completed" | "failed" | "cancelled";
|
export type BatchStatus = "pending" | "processing" | "completed" | "failed" | "cancelled";
|
||||||
|
|
||||||
|
export type ModelComplexity = 'high' | 'low';
|
||||||
export interface BatchRequest {
|
export interface BatchRequest {
|
||||||
customId: string;
|
customId: string;
|
||||||
messages: CoreMessage[];
|
messages: CoreMessage[];
|
||||||
@ -39,6 +40,7 @@ export interface CreateBatchParams<T = any> {
|
|||||||
outputSchema?: z.ZodSchema<T>;
|
outputSchema?: z.ZodSchema<T>;
|
||||||
maxRetries?: number;
|
maxRetries?: number;
|
||||||
timeoutMs?: number;
|
timeoutMs?: number;
|
||||||
|
modelComplexity?: ModelComplexity;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface GetBatchParams {
|
export interface GetBatchParams {
|
||||||
|
|||||||
@ -3,13 +3,19 @@ import { IngestionStatus } from "@core/database";
|
|||||||
import { EpisodeType } from "@core/types";
|
import { EpisodeType } from "@core/types";
|
||||||
import { type z } from "zod";
|
import { type z } from "zod";
|
||||||
import { prisma } from "~/db.server";
|
import { prisma } from "~/db.server";
|
||||||
import { type IngestBodyRequest, ingestTask } from "~/trigger/ingest/ingest";
|
import { hasCredits } from "~/services/billing.server";
|
||||||
import { ingestDocumentTask } from "~/trigger/ingest/ingest-document";
|
import { type IngestBodyRequest } from "~/trigger/ingest/ingest";
|
||||||
|
import {
|
||||||
|
enqueueIngestDocument,
|
||||||
|
enqueueIngestEpisode,
|
||||||
|
} from "~/lib/queue-adapter.server";
|
||||||
|
import { trackFeatureUsage } from "~/services/telemetry.server";
|
||||||
|
|
||||||
export const addToQueue = async (
|
export const addToQueue = async (
|
||||||
rawBody: z.infer<typeof IngestBodyRequest>,
|
rawBody: z.infer<typeof IngestBodyRequest>,
|
||||||
userId: string,
|
userId: string,
|
||||||
activityId?: string,
|
activityId?: string,
|
||||||
|
ingestionQueueId?: string,
|
||||||
) => {
|
) => {
|
||||||
const body = { ...rawBody, source: rawBody.source.toLowerCase() };
|
const body = { ...rawBody, source: rawBody.source.toLowerCase() };
|
||||||
const user = await prisma.user.findFirst({
|
const user = await prisma.user.findFirst({
|
||||||
@ -27,9 +33,28 @@ export const addToQueue = async (
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
const queuePersist = await prisma.ingestionQueue.create({
|
// Check if workspace has sufficient credits before processing
|
||||||
data: {
|
const hasSufficientCredits = await hasCredits(
|
||||||
spaceId: body.spaceId ? body.spaceId : null,
|
user.Workspace?.id as string,
|
||||||
|
"addEpisode",
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!hasSufficientCredits) {
|
||||||
|
throw new Error("no credits");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upsert: update existing or create new ingestion queue entry
|
||||||
|
const queuePersist = await prisma.ingestionQueue.upsert({
|
||||||
|
where: {
|
||||||
|
id: ingestionQueueId || "non-existent-id", // Use provided ID or dummy ID to force create
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
data: body,
|
||||||
|
type: body.type,
|
||||||
|
status: IngestionStatus.PENDING,
|
||||||
|
error: null,
|
||||||
|
},
|
||||||
|
create: {
|
||||||
data: body,
|
data: body,
|
||||||
type: body.type,
|
type: body.type,
|
||||||
status: IngestionStatus.PENDING,
|
status: IngestionStatus.PENDING,
|
||||||
@ -41,36 +66,28 @@ export const addToQueue = async (
|
|||||||
|
|
||||||
let handler;
|
let handler;
|
||||||
if (body.type === EpisodeType.DOCUMENT) {
|
if (body.type === EpisodeType.DOCUMENT) {
|
||||||
handler = await ingestDocumentTask.trigger(
|
handler = await enqueueIngestDocument({
|
||||||
{
|
body,
|
||||||
body,
|
userId,
|
||||||
userId,
|
workspaceId: user.Workspace.id,
|
||||||
workspaceId: user.Workspace.id,
|
queueId: queuePersist.id,
|
||||||
queueId: queuePersist.id,
|
});
|
||||||
},
|
|
||||||
{
|
// Track document ingestion
|
||||||
queue: "document-ingestion-queue",
|
trackFeatureUsage("document_ingested", userId).catch(console.error);
|
||||||
concurrencyKey: userId,
|
|
||||||
tags: [user.id, queuePersist.id],
|
|
||||||
},
|
|
||||||
);
|
|
||||||
} else if (body.type === EpisodeType.CONVERSATION) {
|
} else if (body.type === EpisodeType.CONVERSATION) {
|
||||||
handler = await ingestTask.trigger(
|
handler = await enqueueIngestEpisode({
|
||||||
{
|
body,
|
||||||
body,
|
userId,
|
||||||
userId,
|
workspaceId: user.Workspace.id,
|
||||||
workspaceId: user.Workspace.id,
|
queueId: queuePersist.id,
|
||||||
queueId: queuePersist.id,
|
});
|
||||||
},
|
|
||||||
{
|
// Track episode ingestion
|
||||||
queue: "ingestion-queue",
|
trackFeatureUsage("episode_ingested", userId).catch(console.error);
|
||||||
concurrencyKey: userId,
|
|
||||||
tags: [user.id, queuePersist.id],
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return { id: handler?.id, token: handler?.publicAccessToken };
|
return { id: handler?.id, publicAccessToken: handler?.token };
|
||||||
};
|
};
|
||||||
|
|
||||||
export { IngestBodyRequest };
|
export { IngestBodyRequest };
|
||||||
|
|||||||
@ -1,84 +1,189 @@
|
|||||||
import {
|
import { type CoreMessage, embed, generateText, streamText } from "ai";
|
||||||
type CoreMessage,
|
|
||||||
type LanguageModelV1,
|
|
||||||
embed,
|
|
||||||
generateText,
|
|
||||||
streamText,
|
|
||||||
} from "ai";
|
|
||||||
import { openai } from "@ai-sdk/openai";
|
import { openai } from "@ai-sdk/openai";
|
||||||
import { logger } from "~/services/logger.service";
|
import { logger } from "~/services/logger.service";
|
||||||
|
|
||||||
import { createOllama, type OllamaProvider } from "ollama-ai-provider";
|
import { createOllama } from "ollama-ai-provider-v2";
|
||||||
import { anthropic } from "@ai-sdk/anthropic";
|
import { anthropic } from "@ai-sdk/anthropic";
|
||||||
import { google } from "@ai-sdk/google";
|
import { google } from "@ai-sdk/google";
|
||||||
|
|
||||||
|
export type ModelComplexity = "high" | "low";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the appropriate model for a given complexity level.
|
||||||
|
* HIGH complexity uses the configured MODEL.
|
||||||
|
* LOW complexity automatically downgrades to cheaper variants if possible.
|
||||||
|
*/
|
||||||
|
export function getModelForTask(complexity: ModelComplexity = "high"): string {
|
||||||
|
const baseModel = process.env.MODEL || "gpt-4.1-2025-04-14";
|
||||||
|
|
||||||
|
// HIGH complexity - always use the configured model
|
||||||
|
if (complexity === "high") {
|
||||||
|
return baseModel;
|
||||||
|
}
|
||||||
|
|
||||||
|
// LOW complexity - automatically downgrade expensive models to cheaper variants
|
||||||
|
// If already using a cheap model, keep it
|
||||||
|
const downgrades: Record<string, string> = {
|
||||||
|
// OpenAI downgrades
|
||||||
|
"gpt-5-2025-08-07": "gpt-5-mini-2025-08-07",
|
||||||
|
"gpt-4.1-2025-04-14": "gpt-4.1-mini-2025-04-14",
|
||||||
|
|
||||||
|
// Anthropic downgrades
|
||||||
|
"claude-sonnet-4-5": "claude-3-5-haiku-20241022",
|
||||||
|
"claude-3-7-sonnet-20250219": "claude-3-5-haiku-20241022",
|
||||||
|
"claude-3-opus-20240229": "claude-3-5-haiku-20241022",
|
||||||
|
|
||||||
|
// Google downgrades
|
||||||
|
"gemini-2.5-pro-preview-03-25": "gemini-2.5-flash-preview-04-17",
|
||||||
|
"gemini-2.0-flash": "gemini-2.0-flash-lite",
|
||||||
|
|
||||||
|
// AWS Bedrock downgrades (keep same model - already cost-optimized)
|
||||||
|
"us.amazon.nova-premier-v1:0": "us.amazon.nova-premier-v1:0",
|
||||||
|
};
|
||||||
|
|
||||||
|
return downgrades[baseModel] || baseModel;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getModel = (takeModel?: string) => {
|
||||||
|
let model = takeModel;
|
||||||
|
|
||||||
|
const anthropicKey = process.env.ANTHROPIC_API_KEY;
|
||||||
|
const googleKey = process.env.GOOGLE_GENERATIVE_AI_API_KEY;
|
||||||
|
const openaiKey = process.env.OPENAI_API_KEY;
|
||||||
|
let ollamaUrl = process.env.OLLAMA_URL;
|
||||||
|
model = model || process.env.MODEL || "gpt-4.1-2025-04-14";
|
||||||
|
|
||||||
|
let modelInstance;
|
||||||
|
let modelTemperature = Number(process.env.MODEL_TEMPERATURE) || 1;
|
||||||
|
ollamaUrl = undefined;
|
||||||
|
|
||||||
|
// First check if Ollama URL exists and use Ollama
|
||||||
|
if (ollamaUrl) {
|
||||||
|
const ollama = createOllama({
|
||||||
|
baseURL: ollamaUrl,
|
||||||
|
});
|
||||||
|
modelInstance = ollama(model || "llama2"); // Default to llama2 if no model specified
|
||||||
|
} else {
|
||||||
|
// If no Ollama, check other models
|
||||||
|
|
||||||
|
if (model.includes("claude")) {
|
||||||
|
if (!anthropicKey) {
|
||||||
|
throw new Error("No Anthropic API key found. Set ANTHROPIC_API_KEY");
|
||||||
|
}
|
||||||
|
modelInstance = anthropic(model);
|
||||||
|
modelTemperature = 0.5;
|
||||||
|
} else if (model.includes("gemini")) {
|
||||||
|
if (!googleKey) {
|
||||||
|
throw new Error("No Google API key found. Set GOOGLE_API_KEY");
|
||||||
|
}
|
||||||
|
modelInstance = google(model);
|
||||||
|
} else {
|
||||||
|
if (!openaiKey) {
|
||||||
|
throw new Error("No OpenAI API key found. Set OPENAI_API_KEY");
|
||||||
|
}
|
||||||
|
modelInstance = openai(model);
|
||||||
|
}
|
||||||
|
|
||||||
|
return modelInstance;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
export interface TokenUsage {
|
||||||
|
promptTokens?: number;
|
||||||
|
completionTokens?: number;
|
||||||
|
totalTokens?: number;
|
||||||
|
}
|
||||||
|
|
||||||
export async function makeModelCall(
|
export async function makeModelCall(
|
||||||
stream: boolean,
|
stream: boolean,
|
||||||
messages: CoreMessage[],
|
messages: CoreMessage[],
|
||||||
onFinish: (text: string, model: string) => void,
|
onFinish: (text: string, model: string, usage?: TokenUsage) => void,
|
||||||
options?: any,
|
options?: any,
|
||||||
|
complexity: ModelComplexity = "high",
|
||||||
) {
|
) {
|
||||||
let modelInstance;
|
let model = getModelForTask(complexity);
|
||||||
const model = process.env.MODEL as any;
|
logger.info(`complexity: ${complexity}, model: ${model}`);
|
||||||
const ollamaUrl = process.env.OLLAMA_URL;
|
|
||||||
let ollama: OllamaProvider | undefined;
|
|
||||||
|
|
||||||
if (ollamaUrl) {
|
const modelInstance = getModel(model);
|
||||||
ollama = createOllama({
|
const generateTextOptions: any = {};
|
||||||
baseURL: ollamaUrl,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
switch (model) {
|
if (!modelInstance) {
|
||||||
case "gpt-4.1-2025-04-14":
|
throw new Error(`Unsupported model type: ${model}`);
|
||||||
case "gpt-4.1-mini-2025-04-14":
|
|
||||||
case "gpt-5-mini-2025-08-07":
|
|
||||||
case "gpt-5-2025-08-07":
|
|
||||||
case "gpt-4.1-nano-2025-04-14":
|
|
||||||
modelInstance = openai(model, { ...options });
|
|
||||||
break;
|
|
||||||
|
|
||||||
case "claude-3-7-sonnet-20250219":
|
|
||||||
case "claude-3-opus-20240229":
|
|
||||||
case "claude-3-5-haiku-20241022":
|
|
||||||
modelInstance = anthropic(model, { ...options });
|
|
||||||
break;
|
|
||||||
|
|
||||||
case "gemini-2.5-flash-preview-04-17":
|
|
||||||
case "gemini-2.5-pro-preview-03-25":
|
|
||||||
case "gemini-2.0-flash":
|
|
||||||
case "gemini-2.0-flash-lite":
|
|
||||||
modelInstance = google(model, { ...options });
|
|
||||||
break;
|
|
||||||
|
|
||||||
default:
|
|
||||||
if (ollama) {
|
|
||||||
modelInstance = ollama(model);
|
|
||||||
}
|
|
||||||
logger.warn(`Unsupported model type: ${model}`);
|
|
||||||
break;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (stream) {
|
if (stream) {
|
||||||
return streamText({
|
return streamText({
|
||||||
model: modelInstance as LanguageModelV1,
|
model: modelInstance,
|
||||||
messages,
|
messages,
|
||||||
onFinish: async ({ text }) => {
|
...options,
|
||||||
onFinish(text, model);
|
...generateTextOptions,
|
||||||
|
onFinish: async ({ text, usage }) => {
|
||||||
|
const tokenUsage = usage
|
||||||
|
? {
|
||||||
|
promptTokens: usage.inputTokens,
|
||||||
|
completionTokens: usage.outputTokens,
|
||||||
|
totalTokens: usage.totalTokens,
|
||||||
|
}
|
||||||
|
: undefined;
|
||||||
|
|
||||||
|
if (tokenUsage) {
|
||||||
|
logger.log(
|
||||||
|
`[${complexity.toUpperCase()}] ${model} - Tokens: ${tokenUsage.totalTokens} (prompt: ${tokenUsage.promptTokens}, completion: ${tokenUsage.completionTokens})`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
onFinish(text, model, tokenUsage);
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
const { text } = await generateText({
|
const { text, usage } = await generateText({
|
||||||
model: modelInstance as LanguageModelV1,
|
model: modelInstance,
|
||||||
messages,
|
messages,
|
||||||
|
...generateTextOptions,
|
||||||
});
|
});
|
||||||
|
|
||||||
onFinish(text, model);
|
const tokenUsage = usage
|
||||||
|
? {
|
||||||
|
promptTokens: usage.inputTokens,
|
||||||
|
completionTokens: usage.outputTokens,
|
||||||
|
totalTokens: usage.totalTokens,
|
||||||
|
}
|
||||||
|
: undefined;
|
||||||
|
|
||||||
|
if (tokenUsage) {
|
||||||
|
logger.log(
|
||||||
|
`[${complexity.toUpperCase()}] ${model} - Tokens: ${tokenUsage.totalTokens} (prompt: ${tokenUsage.promptTokens}, completion: ${tokenUsage.completionTokens})`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
onFinish(text, model, tokenUsage);
|
||||||
|
|
||||||
return text;
|
return text;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Determines if a given model is proprietary (OpenAI, Anthropic, Google, Grok)
|
||||||
|
* or open source (accessed via Bedrock, Ollama, etc.)
|
||||||
|
*/
|
||||||
|
export function isProprietaryModel(
|
||||||
|
modelName?: string,
|
||||||
|
complexity: ModelComplexity = "high",
|
||||||
|
): boolean {
|
||||||
|
const model = modelName || getModelForTask(complexity);
|
||||||
|
if (!model) return false;
|
||||||
|
|
||||||
|
// Proprietary model patterns
|
||||||
|
const proprietaryPatterns = [
|
||||||
|
/^gpt-/, // OpenAI models
|
||||||
|
/^claude-/, // Anthropic models
|
||||||
|
/^gemini-/, // Google models
|
||||||
|
/^grok-/, // xAI models
|
||||||
|
];
|
||||||
|
|
||||||
|
return proprietaryPatterns.some((pattern) => pattern.test(model));
|
||||||
|
}
|
||||||
|
|
||||||
export async function getEmbedding(text: string) {
|
export async function getEmbedding(text: string) {
|
||||||
const ollamaUrl = process.env.OLLAMA_URL;
|
const ollamaUrl = process.env.OLLAMA_URL;
|
||||||
|
|
||||||
|
|||||||
@ -5,6 +5,7 @@ import { singleton } from "~/utils/singleton";
|
|||||||
|
|
||||||
// Create a singleton driver instance
|
// Create a singleton driver instance
|
||||||
const driver = singleton("neo4j", getDriver);
|
const driver = singleton("neo4j", getDriver);
|
||||||
|
const EMBEDDING_MODEL_SIZE = process.env.EMBEDDING_MODEL_SIZE ?? "1024";
|
||||||
|
|
||||||
function getDriver() {
|
function getDriver() {
|
||||||
return neo4j.driver(
|
return neo4j.driver(
|
||||||
@ -111,48 +112,31 @@ export const getNodeLinks = async (userId: string) => {
|
|||||||
export const getClusteredGraphData = async (userId: string) => {
|
export const getClusteredGraphData = async (userId: string) => {
|
||||||
const session = driver.session();
|
const session = driver.session();
|
||||||
try {
|
try {
|
||||||
// Get the proper reified graph structure: Entity -> Statement -> Entity
|
// Get Episode -> Entity graph, only showing entities connected to more than 1 episode
|
||||||
const result = await session.run(
|
const result = await session.run(
|
||||||
`// Get all statements and their entity connections for reified graph
|
`// Find entities connected to more than 1 episode
|
||||||
MATCH (s:Statement)
|
MATCH (e:Episode{userId: $userId})-[:HAS_PROVENANCE]->(s:Statement {userId: $userId})-[r:HAS_SUBJECT|HAS_OBJECT|HAS_PREDICATE]->(entity:Entity)
|
||||||
WHERE s.userId = $userId
|
WITH entity, count(DISTINCT e) as episodeCount
|
||||||
|
WHERE episodeCount > 1
|
||||||
// Get all entities connected to each statement
|
WITH collect(entity.uuid) as validEntityUuids
|
||||||
MATCH (s)-[:HAS_SUBJECT]->(subj:Entity)
|
|
||||||
MATCH (s)-[:HAS_PREDICATE]->(pred:Entity)
|
// Build Episode -> Entity relationships for valid entities
|
||||||
MATCH (s)-[:HAS_OBJECT]->(obj:Entity)
|
MATCH (e:Episode{userId: $userId})-[r:HAS_PROVENANCE]->(s:Statement {userId: $userId})-[r:HAS_SUBJECT|HAS_OBJECT|HAS_PREDICATE]->(entity:Entity)
|
||||||
|
WHERE entity.uuid IN validEntityUuids
|
||||||
// Return both Entity->Statement and Statement->Entity relationships
|
WITH DISTINCT e, entity, type(r) as relType,
|
||||||
WITH s, subj, pred, obj
|
CASE WHEN size(e.spaceIds) > 0 THEN e.spaceIds[0] ELSE null END as clusterId,
|
||||||
UNWIND [
|
s.createdAt as createdAt
|
||||||
// Subject Entity -> Statement
|
|
||||||
{source: subj, target: s, type: 'HAS_SUBJECT', isEntityToStatement: true},
|
RETURN DISTINCT
|
||||||
// Statement -> Predicate Entity
|
e.uuid as sourceUuid,
|
||||||
{source: s, target: pred, type: 'HAS_PREDICATE', isStatementToEntity: true},
|
e.content as sourceContent,
|
||||||
// Statement -> Object Entity
|
'Episode' as sourceNodeType,
|
||||||
{source: s, target: obj, type: 'HAS_OBJECT', isStatementToEntity: true}
|
entity.uuid as targetUuid,
|
||||||
] AS rel
|
entity.name as targetName,
|
||||||
|
'Entity' as targetNodeType,
|
||||||
RETURN DISTINCT
|
relType as edgeType,
|
||||||
rel.source.uuid as sourceUuid,
|
clusterId,
|
||||||
rel.source.name as sourceName,
|
createdAt`,
|
||||||
rel.source.labels as sourceLabels,
|
|
||||||
rel.source.type as sourceType,
|
|
||||||
rel.source.properties as sourceProperties,
|
|
||||||
rel.target.uuid as targetUuid,
|
|
||||||
rel.target.name as targetName,
|
|
||||||
rel.target.type as targetType,
|
|
||||||
rel.target.labels as targetLabels,
|
|
||||||
rel.target.properties as targetProperties,
|
|
||||||
rel.type as relationshipType,
|
|
||||||
s.uuid as statementUuid,
|
|
||||||
s.spaceIds as spaceIds,
|
|
||||||
s.fact as fact,
|
|
||||||
s.invalidAt as invalidAt,
|
|
||||||
s.validAt as validAt,
|
|
||||||
s.createdAt as createdAt,
|
|
||||||
rel.isEntityToStatement as isEntityToStatement,
|
|
||||||
rel.isStatementToEntity as isStatementToEntity`,
|
|
||||||
{ userId },
|
{ userId },
|
||||||
);
|
);
|
||||||
|
|
||||||
@ -161,92 +145,49 @@ export const getClusteredGraphData = async (userId: string) => {
|
|||||||
|
|
||||||
result.records.forEach((record) => {
|
result.records.forEach((record) => {
|
||||||
const sourceUuid = record.get("sourceUuid");
|
const sourceUuid = record.get("sourceUuid");
|
||||||
const sourceName = record.get("sourceName");
|
const sourceContent = record.get("sourceContent");
|
||||||
const sourceType = record.get("sourceType");
|
|
||||||
const sourceLabels = record.get("sourceLabels") || [];
|
|
||||||
const sourceProperties = record.get("sourceProperties") || {};
|
|
||||||
|
|
||||||
const targetUuid = record.get("targetUuid");
|
const targetUuid = record.get("targetUuid");
|
||||||
const targetName = record.get("targetName");
|
const targetName = record.get("targetName");
|
||||||
const targetLabels = record.get("targetLabels") || [];
|
const edgeType = record.get("edgeType");
|
||||||
const targetProperties = record.get("targetProperties") || {};
|
const clusterId = record.get("clusterId");
|
||||||
const targetType = record.get("targetType");
|
|
||||||
|
|
||||||
const relationshipType = record.get("relationshipType");
|
|
||||||
const statementUuid = record.get("statementUuid");
|
|
||||||
const clusterIds = record.get("spaceIds");
|
|
||||||
const clusterId = clusterIds ? clusterIds[0] : undefined;
|
|
||||||
const fact = record.get("fact");
|
|
||||||
const invalidAt = record.get("invalidAt");
|
|
||||||
const validAt = record.get("validAt");
|
|
||||||
const createdAt = record.get("createdAt");
|
const createdAt = record.get("createdAt");
|
||||||
|
|
||||||
// Create unique edge identifier to avoid duplicates
|
// Create unique edge identifier to avoid duplicates
|
||||||
const edgeKey = `${sourceUuid}-${targetUuid}-${relationshipType}`;
|
const edgeKey = `${sourceUuid}-${targetUuid}-${edgeType}`;
|
||||||
if (processedEdges.has(edgeKey)) return;
|
if (processedEdges.has(edgeKey)) return;
|
||||||
processedEdges.add(edgeKey);
|
processedEdges.add(edgeKey);
|
||||||
|
|
||||||
// Determine node types and add appropriate cluster information
|
|
||||||
const isSourceStatement =
|
|
||||||
sourceLabels.includes("Statement") || sourceUuid === statementUuid;
|
|
||||||
const isTargetStatement =
|
|
||||||
targetLabels.includes("Statement") || targetUuid === statementUuid;
|
|
||||||
|
|
||||||
// Statement nodes get cluster info, Entity nodes get default attributes
|
|
||||||
const sourceAttributes = isSourceStatement
|
|
||||||
? {
|
|
||||||
...sourceProperties,
|
|
||||||
clusterId,
|
|
||||||
nodeType: "Statement",
|
|
||||||
fact,
|
|
||||||
invalidAt,
|
|
||||||
validAt,
|
|
||||||
}
|
|
||||||
: {
|
|
||||||
...sourceProperties,
|
|
||||||
nodeType: "Entity",
|
|
||||||
type: sourceType,
|
|
||||||
name: sourceName,
|
|
||||||
};
|
|
||||||
|
|
||||||
const targetAttributes = isTargetStatement
|
|
||||||
? {
|
|
||||||
...targetProperties,
|
|
||||||
clusterId,
|
|
||||||
nodeType: "Statement",
|
|
||||||
fact,
|
|
||||||
invalidAt,
|
|
||||||
validAt,
|
|
||||||
}
|
|
||||||
: {
|
|
||||||
...targetProperties,
|
|
||||||
nodeType: "Entity",
|
|
||||||
type: targetType,
|
|
||||||
name: targetName,
|
|
||||||
};
|
|
||||||
|
|
||||||
triplets.push({
|
triplets.push({
|
||||||
sourceNode: {
|
sourceNode: {
|
||||||
uuid: sourceUuid,
|
uuid: sourceUuid,
|
||||||
labels: sourceLabels,
|
labels: ["Episode"],
|
||||||
attributes: sourceAttributes,
|
attributes: {
|
||||||
name: isSourceStatement ? fact : sourceName || sourceUuid,
|
nodeType: "Episode",
|
||||||
|
content: sourceContent,
|
||||||
|
episodeUuid: sourceUuid,
|
||||||
|
clusterId,
|
||||||
|
},
|
||||||
|
name: sourceContent || sourceUuid,
|
||||||
clusterId,
|
clusterId,
|
||||||
createdAt: createdAt || "",
|
createdAt: createdAt || "",
|
||||||
},
|
},
|
||||||
edge: {
|
edge: {
|
||||||
uuid: `${sourceUuid}-${targetUuid}-${relationshipType}`,
|
uuid: `${sourceUuid}-${targetUuid}-${edgeType}`,
|
||||||
type: relationshipType,
|
type: edgeType,
|
||||||
source_node_uuid: sourceUuid,
|
source_node_uuid: sourceUuid,
|
||||||
target_node_uuid: targetUuid,
|
target_node_uuid: targetUuid,
|
||||||
createdAt: createdAt || "",
|
createdAt: createdAt || "",
|
||||||
},
|
},
|
||||||
targetNode: {
|
targetNode: {
|
||||||
uuid: targetUuid,
|
uuid: targetUuid,
|
||||||
labels: targetLabels,
|
labels: ["Entity"],
|
||||||
attributes: targetAttributes,
|
attributes: {
|
||||||
|
nodeType: "Entity",
|
||||||
|
name: targetName,
|
||||||
|
clusterId,
|
||||||
|
},
|
||||||
|
name: targetName || targetUuid,
|
||||||
clusterId,
|
clusterId,
|
||||||
name: isTargetStatement ? fact : targetName || targetUuid,
|
|
||||||
createdAt: createdAt || "",
|
createdAt: createdAt || "",
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
@ -373,17 +314,17 @@ const initializeSchema = async () => {
|
|||||||
// Create vector indexes for semantic search (if using Neo4j 5.0+)
|
// Create vector indexes for semantic search (if using Neo4j 5.0+)
|
||||||
await runQuery(`
|
await runQuery(`
|
||||||
CREATE VECTOR INDEX entity_embedding IF NOT EXISTS FOR (n:Entity) ON n.nameEmbedding
|
CREATE VECTOR INDEX entity_embedding IF NOT EXISTS FOR (n:Entity) ON n.nameEmbedding
|
||||||
OPTIONS {indexConfig: {\`vector.dimensions\`: 1024, \`vector.similarity_function\`: 'cosine', \`vector.hnsw.ef_construction\`: 400, \`vector.hnsw.m\`: 32}}
|
OPTIONS {indexConfig: {\`vector.dimensions\`: ${EMBEDDING_MODEL_SIZE}, \`vector.similarity_function\`: 'cosine', \`vector.hnsw.ef_construction\`: 400, \`vector.hnsw.m\`: 32}}
|
||||||
`);
|
`);
|
||||||
|
|
||||||
await runQuery(`
|
await runQuery(`
|
||||||
CREATE VECTOR INDEX statement_embedding IF NOT EXISTS FOR (n:Statement) ON n.factEmbedding
|
CREATE VECTOR INDEX statement_embedding IF NOT EXISTS FOR (n:Statement) ON n.factEmbedding
|
||||||
OPTIONS {indexConfig: {\`vector.dimensions\`: 1024, \`vector.similarity_function\`: 'cosine', \`vector.hnsw.ef_construction\`: 400, \`vector.hnsw.m\`: 32}}
|
OPTIONS {indexConfig: {\`vector.dimensions\`: ${EMBEDDING_MODEL_SIZE}, \`vector.similarity_function\`: 'cosine', \`vector.hnsw.ef_construction\`: 400, \`vector.hnsw.m\`: 32}}
|
||||||
`);
|
`);
|
||||||
|
|
||||||
await runQuery(`
|
await runQuery(`
|
||||||
CREATE VECTOR INDEX episode_embedding IF NOT EXISTS FOR (n:Episode) ON n.contentEmbedding
|
CREATE VECTOR INDEX episode_embedding IF NOT EXISTS FOR (n:Episode) ON n.contentEmbedding
|
||||||
OPTIONS {indexConfig: {\`vector.dimensions\`: 1024, \`vector.similarity_function\`: 'cosine', \`vector.hnsw.ef_construction\`: 400, \`vector.hnsw.m\`: 32}}
|
OPTIONS {indexConfig: {\`vector.dimensions\`: ${EMBEDDING_MODEL_SIZE}, \`vector.similarity_function\`: 'cosine', \`vector.hnsw.ef_construction\`: 400, \`vector.hnsw.m\`: 32}}
|
||||||
`);
|
`);
|
||||||
|
|
||||||
// Create fulltext indexes for BM25 search
|
// Create fulltext indexes for BM25 search
|
||||||
|
|||||||
324
apps/webapp/app/lib/prompt.server.ts
Normal file
324
apps/webapp/app/lib/prompt.server.ts
Normal file
@ -0,0 +1,324 @@
|
|||||||
|
import { type StopCondition } from "ai";
|
||||||
|
|
||||||
|
export const hasAnswer: StopCondition<any> = ({ steps }) => {
|
||||||
|
return (
|
||||||
|
steps.some((step) => step.text?.includes("</final_response>")) ?? false
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export const hasQuestion: StopCondition<any> = ({ steps }) => {
|
||||||
|
return (
|
||||||
|
steps.some((step) => step.text?.includes("</question_response>")) ?? false
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export const REACT_SYSTEM_PROMPT = `
|
||||||
|
You are a helpful AI assistant with access to user memory. Your primary capabilities are:
|
||||||
|
|
||||||
|
1. **Memory-First Approach**: Always check user memory first to understand context and previous interactions
|
||||||
|
2. **Intelligent Information Gathering**: Analyze queries to determine if current information is needed
|
||||||
|
3. **Memory Management**: Help users store, retrieve, and organize information in their memory
|
||||||
|
4. **Contextual Assistance**: Use memory to provide personalized and contextual responses
|
||||||
|
|
||||||
|
<information_gathering>
|
||||||
|
Follow this intelligent approach for information gathering:
|
||||||
|
|
||||||
|
1. **MEMORY FIRST** (Always Required)
|
||||||
|
- Always check memory FIRST using core--search_memory before any other actions
|
||||||
|
- Consider this your highest priority for EVERY interaction - as essential as breathing
|
||||||
|
- Memory provides context, personal preferences, and historical information
|
||||||
|
- Use memory to understand user's background, ongoing projects, and past conversations
|
||||||
|
|
||||||
|
2. **INFORMATION SYNTHESIS** (Combine Sources)
|
||||||
|
- Use memory to personalize current information based on user preferences
|
||||||
|
- Always store new useful information in memory using core--add_memory
|
||||||
|
|
||||||
|
3. **TRAINING KNOWLEDGE** (Foundation)
|
||||||
|
- Use your training knowledge as the foundation for analysis and explanation
|
||||||
|
- Apply training knowledge to interpret and contextualize information from memory
|
||||||
|
- Indicate when you're using training knowledge vs. live information sources
|
||||||
|
|
||||||
|
EXECUTION APPROACH:
|
||||||
|
- Memory search is mandatory for every interaction
|
||||||
|
- Always indicate your information sources in responses
|
||||||
|
</information_gathering>
|
||||||
|
|
||||||
|
<memory>
|
||||||
|
QUERY FORMATION:
|
||||||
|
- Write specific factual statements as queries (e.g., "user email address" not "what is the user's email?")
|
||||||
|
- Create multiple targeted memory queries for complex requests
|
||||||
|
|
||||||
|
KEY QUERY AREAS:
|
||||||
|
- Personal context: user name, location, identity, work context
|
||||||
|
- Project context: repositories, codebases, current work, team members
|
||||||
|
- Task context: recent tasks, ongoing projects, deadlines, priorities
|
||||||
|
- Integration context: GitHub repos, Slack channels, Linear projects, connected services
|
||||||
|
- Communication patterns: email preferences, notification settings, workflow automation
|
||||||
|
- Technical context: coding languages, frameworks, development environment
|
||||||
|
- Collaboration context: team members, project stakeholders, meeting patterns
|
||||||
|
- Preferences: likes, dislikes, communication style, tool preferences
|
||||||
|
- History: previous discussions, past requests, completed work, recurring issues
|
||||||
|
- Automation rules: user-defined workflows, triggers, automation preferences
|
||||||
|
|
||||||
|
MEMORY USAGE:
|
||||||
|
- Execute multiple memory queries in parallel rather than sequentially
|
||||||
|
- Batch related memory queries when possible
|
||||||
|
- Prioritize recent information over older memories
|
||||||
|
- Create comprehensive context-aware queries based on user message/activity content
|
||||||
|
- Extract and query SEMANTIC CONTENT, not just structural metadata
|
||||||
|
- Parse titles, descriptions, and content for actual subject matter keywords
|
||||||
|
- Search internal SOL tasks/conversations that may relate to the same topics
|
||||||
|
- Query ALL relatable concepts, not just direct keywords or IDs
|
||||||
|
- Search for similar past situations, patterns, and related work
|
||||||
|
- Include synonyms, related terms, and contextual concepts in queries
|
||||||
|
- Query user's historical approach to similar requests or activities
|
||||||
|
- Search for connected projects, tasks, conversations, and collaborations
|
||||||
|
- Retrieve workflow patterns and past decision-making context
|
||||||
|
- Query broader domain context beyond immediate request scope
|
||||||
|
- Remember: SOL tracks work that external tools don't - search internal content thoroughly
|
||||||
|
- Blend memory insights naturally into responses
|
||||||
|
- Verify you've checked relevant memory before finalizing ANY response
|
||||||
|
|
||||||
|
</memory>
|
||||||
|
|
||||||
|
<external_services>
|
||||||
|
- To use: load_mcp with EXACT integration name from the available list
|
||||||
|
- Can load multiple at once with an array
|
||||||
|
- Only load when tools are NOT already available in your current toolset
|
||||||
|
- If a tool is already available, use it directly without load_mcp
|
||||||
|
- If requested integration unavailable: inform user politely
|
||||||
|
</external_services>
|
||||||
|
|
||||||
|
<tool_calling>
|
||||||
|
You have tools at your disposal to assist users:
|
||||||
|
|
||||||
|
CORE PRINCIPLES:
|
||||||
|
- Use tools only when necessary for the task at hand
|
||||||
|
- Always check memory FIRST before making other tool calls
|
||||||
|
- Execute multiple operations in parallel whenever possible
|
||||||
|
- Use sequential calls only when output of one is required for input of another
|
||||||
|
|
||||||
|
PARAMETER HANDLING:
|
||||||
|
- Follow tool schemas exactly with all required parameters
|
||||||
|
- Only use values that are:
|
||||||
|
• Explicitly provided by the user (use EXACTLY as given)
|
||||||
|
• Reasonably inferred from context
|
||||||
|
• Retrieved from memory or prior tool calls
|
||||||
|
- Never make up values for required parameters
|
||||||
|
- Omit optional parameters unless clearly needed
|
||||||
|
- Analyze user's descriptive terms for parameter clues
|
||||||
|
|
||||||
|
TOOL SELECTION:
|
||||||
|
- Never call tools not provided in this conversation
|
||||||
|
- Skip tool calls for general questions you can answer directly from memory/knowledge
|
||||||
|
- For identical operations on multiple items, use parallel tool calls
|
||||||
|
- Default to parallel execution (3-5× faster than sequential calls)
|
||||||
|
- You can always access external service tools by loading them with load_mcp first
|
||||||
|
|
||||||
|
TOOL MENTION HANDLING:
|
||||||
|
When user message contains <mention data-id="tool_name" data-label="tool"></mention>:
|
||||||
|
- Extract tool_name from data-id attribute
|
||||||
|
- First check if it's a built-in tool; if not, check EXTERNAL SERVICES TOOLS
|
||||||
|
- If available: Load it with load_mcp and focus on addressing the request with this tool
|
||||||
|
- If unavailable: Inform user and suggest alternatives if possible
|
||||||
|
- For multiple tool mentions: Load all applicable tools in a single load_mcp call
|
||||||
|
|
||||||
|
ERROR HANDLING:
|
||||||
|
- If a tool returns an error, try fixing parameters before retrying
|
||||||
|
- If you can't resolve an error, explain the issue to the user
|
||||||
|
- Consider alternative tools when primary tools are unavailable
|
||||||
|
</tool_calling>
|
||||||
|
|
||||||
|
<communication>
|
||||||
|
Use EXACTLY ONE of these formats for all user-facing communication:
|
||||||
|
|
||||||
|
PROGRESS UPDATES - During processing:
|
||||||
|
- Use the core--progress_update tool to keep users informed
|
||||||
|
- Update users about what you're discovering or doing next
|
||||||
|
- Keep messages clear and user-friendly
|
||||||
|
- Avoid technical jargon
|
||||||
|
|
||||||
|
QUESTIONS - When you need information:
|
||||||
|
<question_response>
|
||||||
|
<p>[Your question with HTML formatting]</p>
|
||||||
|
</question_response>
|
||||||
|
|
||||||
|
- Ask questions only when you cannot find information through memory, or tools
|
||||||
|
- Be specific about what you need to know
|
||||||
|
- Provide context for why you're asking
|
||||||
|
|
||||||
|
FINAL ANSWERS - When completing tasks:
|
||||||
|
<final_response>
|
||||||
|
<p>[Your answer with HTML formatting]</p>
|
||||||
|
</final_response>
|
||||||
|
|
||||||
|
CRITICAL:
|
||||||
|
- Use ONE format per turn
|
||||||
|
- Apply proper HTML formatting (<h1>, <h2>, <p>, <ul>, <li>, etc.)
|
||||||
|
- Never mix communication formats
|
||||||
|
- Keep responses clear and helpful
|
||||||
|
- Always indicate your information sources (memory, and/or knowledge)
|
||||||
|
</communication>
|
||||||
|
`;
|
||||||
|
|
||||||
|
export function getReActPrompt(
|
||||||
|
metadata?: { source?: string; url?: string; pageTitle?: string },
|
||||||
|
intentOverride?: string,
|
||||||
|
): string {
|
||||||
|
const contextHints = [];
|
||||||
|
|
||||||
|
if (
|
||||||
|
metadata?.source === "chrome" &&
|
||||||
|
metadata?.url?.includes("mail.google.com")
|
||||||
|
) {
|
||||||
|
contextHints.push("Content is from email - likely reading intent");
|
||||||
|
}
|
||||||
|
if (
|
||||||
|
metadata?.source === "chrome" &&
|
||||||
|
metadata?.url?.includes("calendar.google.com")
|
||||||
|
) {
|
||||||
|
contextHints.push("Content is from calendar - likely meeting prep intent");
|
||||||
|
}
|
||||||
|
if (
|
||||||
|
metadata?.source === "chrome" &&
|
||||||
|
metadata?.url?.includes("docs.google.com")
|
||||||
|
) {
|
||||||
|
contextHints.push(
|
||||||
|
"Content is from document editor - likely writing intent",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (metadata?.source === "obsidian") {
|
||||||
|
contextHints.push(
|
||||||
|
"Content is from note editor - likely writing or research intent",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return `You are a memory research agent analyzing content to find relevant context.
|
||||||
|
|
||||||
|
YOUR PROCESS (ReAct Framework):
|
||||||
|
|
||||||
|
1. DECOMPOSE: First, break down the content into structured categories
|
||||||
|
|
||||||
|
Analyze the content and extract:
|
||||||
|
a) ENTITIES: Specific people, project names, tools, products mentioned
|
||||||
|
Example: "John Smith", "Phoenix API", "Redis", "mobile app"
|
||||||
|
|
||||||
|
b) TOPICS & CONCEPTS: Key subjects, themes, domains
|
||||||
|
Example: "authentication", "database design", "performance optimization"
|
||||||
|
|
||||||
|
c) TEMPORAL MARKERS: Time references, deadlines, events
|
||||||
|
Example: "last week's meeting", "Q2 launch", "yesterday's discussion"
|
||||||
|
|
||||||
|
d) ACTIONS & TASKS: What's being done, decided, or requested
|
||||||
|
Example: "implement feature", "review code", "make decision on"
|
||||||
|
|
||||||
|
e) USER INTENT: What is the user trying to accomplish?
|
||||||
|
${intentOverride ? `User specified: "${intentOverride}"` : "Infer from context: reading/writing/meeting prep/research/task tracking/review"}
|
||||||
|
|
||||||
|
2. FORM QUERIES: Create targeted search queries from your decomposition
|
||||||
|
|
||||||
|
Based on decomposition, form specific queries:
|
||||||
|
- Search for each entity by name (people, projects, tools)
|
||||||
|
- Search for topics the user has discussed before
|
||||||
|
- Search for related work or conversations in this domain
|
||||||
|
- Use the user's actual terminology, not generic concepts
|
||||||
|
|
||||||
|
EXAMPLE - Content: "Email from Sarah about the API redesign we discussed last week"
|
||||||
|
Decomposition:
|
||||||
|
- Entities: "Sarah", "API redesign"
|
||||||
|
- Topics: "API design", "redesign"
|
||||||
|
- Temporal: "last week"
|
||||||
|
- Actions: "discussed", "email communication"
|
||||||
|
- Intent: Reading (email) / meeting prep
|
||||||
|
|
||||||
|
Queries to form:
|
||||||
|
✅ "Sarah" (find past conversations with Sarah)
|
||||||
|
✅ "API redesign" or "API design" (find project discussions)
|
||||||
|
✅ "last week" + "Sarah" (find recent context)
|
||||||
|
✅ "meetings" or "discussions" (find related conversations)
|
||||||
|
|
||||||
|
❌ Avoid: "email communication patterns", "API architecture philosophy"
|
||||||
|
(These are abstract - search what user actually discussed!)
|
||||||
|
|
||||||
|
3. SEARCH: Execute your queries using searchMemory tool
|
||||||
|
- Start with 2-3 core searches based on main entities/topics
|
||||||
|
- Make each search specific and targeted
|
||||||
|
- Use actual terms from the content, not rephrased concepts
|
||||||
|
|
||||||
|
4. OBSERVE: Evaluate search results
|
||||||
|
- Did you find relevant episodes? How many unique ones?
|
||||||
|
- What specific context emerged?
|
||||||
|
- What new entities/topics appeared in results?
|
||||||
|
- Are there gaps in understanding?
|
||||||
|
- Should you search more angles?
|
||||||
|
|
||||||
|
Note: Episode counts are automatically deduplicated across searches - overlapping episodes are only counted once.
|
||||||
|
|
||||||
|
5. REACT: Decide next action based on observations
|
||||||
|
|
||||||
|
STOPPING CRITERIA - Proceed to SYNTHESIZE if ANY of these are true:
|
||||||
|
- You found 20+ unique episodes across your searches → ENOUGH CONTEXT
|
||||||
|
- You performed 5+ searches and found relevant episodes → SUFFICIENT
|
||||||
|
- You performed 7+ searches regardless of results → EXHAUSTED STRATEGIES
|
||||||
|
- You found strong relevant context from multiple angles → COMPLETE
|
||||||
|
|
||||||
|
System nudges will provide awareness of your progress, but you decide when synthesis quality would be optimal.
|
||||||
|
|
||||||
|
If you found little/no context AND searched less than 7 times:
|
||||||
|
- Try different query angles from your decomposition
|
||||||
|
- Search broader related topics
|
||||||
|
- Search user's projects or work areas
|
||||||
|
- Try alternative terminology
|
||||||
|
|
||||||
|
⚠️ DO NOT search endlessly - if you found relevant episodes, STOP and synthesize!
|
||||||
|
|
||||||
|
6. SYNTHESIZE: After gathering sufficient context, provide final answer
|
||||||
|
- Wrap your synthesis in <final_response> tags
|
||||||
|
- Present direct factual context from memory - no meta-commentary
|
||||||
|
- Write as if providing background context to an AI assistant
|
||||||
|
- Include: facts, decisions, preferences, patterns, timelines
|
||||||
|
- Note any gaps, contradictions, or evolution in thinking
|
||||||
|
- Keep it concise and actionable
|
||||||
|
- DO NOT use phrases like "Previous discussions on", "From conversations", "Past preferences indicate"
|
||||||
|
- DO NOT use conversational language like "you said" or "you mentioned"
|
||||||
|
- Present information as direct factual statements
|
||||||
|
|
||||||
|
FINAL RESPONSE FORMAT:
|
||||||
|
<final_response>
|
||||||
|
[Direct synthesized context - factual statements only]
|
||||||
|
|
||||||
|
Good examples:
|
||||||
|
- "The API redesign focuses on performance and scalability. Key decisions: moving to GraphQL, caching layer with Redis."
|
||||||
|
- "Project Phoenix launches Q2 2024. Main features: real-time sync, offline mode, collaborative editing."
|
||||||
|
- "Sarah leads the backend team. Recent work includes authentication refactor and database migration."
|
||||||
|
|
||||||
|
Bad examples:
|
||||||
|
❌ "Previous discussions on the API revealed..."
|
||||||
|
❌ "From past conversations, it appears that..."
|
||||||
|
❌ "Past preferences indicate..."
|
||||||
|
❌ "The user mentioned that..."
|
||||||
|
|
||||||
|
Just state the facts directly.
|
||||||
|
</final_response>
|
||||||
|
|
||||||
|
${contextHints.length > 0 ? `\nCONTEXT HINTS:\n${contextHints.join("\n")}` : ""}
|
||||||
|
|
||||||
|
CRITICAL REQUIREMENTS:
|
||||||
|
- ALWAYS start with DECOMPOSE step - extract entities, topics, temporal markers, actions
|
||||||
|
- Form specific queries from your decomposition - use user's actual terms
|
||||||
|
- Minimum 3 searches required
|
||||||
|
- Maximum 10 searches allowed - must synthesize after that
|
||||||
|
- STOP and synthesize when you hit stopping criteria (20+ episodes, 5+ searches with results, 7+ searches total)
|
||||||
|
- Each search should target different aspects from decomposition
|
||||||
|
- Present synthesis directly without meta-commentary
|
||||||
|
|
||||||
|
SEARCH QUALITY CHECKLIST:
|
||||||
|
✅ Queries use specific terms from content (names, projects, exact phrases)
|
||||||
|
✅ Searched multiple angles from decomposition (entities, topics, related areas)
|
||||||
|
✅ Stop when you have enough unique context - don't search endlessly
|
||||||
|
✅ Tried alternative terminology if initial searches found nothing
|
||||||
|
❌ Avoid generic/abstract queries that don't match user's vocabulary
|
||||||
|
❌ Don't stop at 3 searches if you found zero unique episodes
|
||||||
|
❌ Don't keep searching when you already found 20+ unique episodes
|
||||||
|
}`;
|
||||||
|
}
|
||||||
233
apps/webapp/app/lib/queue-adapter.server.ts
Normal file
233
apps/webapp/app/lib/queue-adapter.server.ts
Normal file
@ -0,0 +1,233 @@
|
|||||||
|
/**
|
||||||
|
* Queue Adapter
|
||||||
|
*
|
||||||
|
* This module provides a unified interface for queueing background jobs,
|
||||||
|
* supporting both Trigger.dev and BullMQ backends based on the QUEUE_PROVIDER
|
||||||
|
* environment variable.
|
||||||
|
*
|
||||||
|
* Usage:
|
||||||
|
* - Set QUEUE_PROVIDER="trigger" for Trigger.dev (default, good for production scaling)
|
||||||
|
* - Set QUEUE_PROVIDER="bullmq" for BullMQ (good for open-source deployments)
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { env } from "~/env.server";
|
||||||
|
import type { z } from "zod";
|
||||||
|
import type { IngestBodyRequest } from "~/jobs/ingest/ingest-episode.logic";
|
||||||
|
import type { CreateConversationTitlePayload } from "~/jobs/conversation/create-title.logic";
|
||||||
|
import type { SessionCompactionPayload } from "~/jobs/session/session-compaction.logic";
|
||||||
|
import type { SpaceAssignmentPayload } from "~/jobs/spaces/space-assignment.logic";
|
||||||
|
import type { SpaceSummaryPayload } from "~/jobs/spaces/space-summary.logic";
|
||||||
|
|
||||||
|
type QueueProvider = "trigger" | "bullmq";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enqueue episode ingestion job
|
||||||
|
*/
|
||||||
|
export async function enqueueIngestEpisode(payload: {
|
||||||
|
body: z.infer<typeof IngestBodyRequest>;
|
||||||
|
userId: string;
|
||||||
|
workspaceId: string;
|
||||||
|
queueId: string;
|
||||||
|
}): Promise<{ id?: string; token?: string }> {
|
||||||
|
const provider = env.QUEUE_PROVIDER as QueueProvider;
|
||||||
|
|
||||||
|
if (provider === "trigger") {
|
||||||
|
const { ingestTask } = await import("~/trigger/ingest/ingest");
|
||||||
|
const handler = await ingestTask.trigger(payload, {
|
||||||
|
queue: "ingestion-queue",
|
||||||
|
concurrencyKey: payload.userId,
|
||||||
|
tags: [payload.userId, payload.queueId],
|
||||||
|
});
|
||||||
|
return { id: handler.id, token: handler.publicAccessToken };
|
||||||
|
} else {
|
||||||
|
// BullMQ
|
||||||
|
const { ingestQueue } = await import("~/bullmq/queues");
|
||||||
|
const job = await ingestQueue.add("ingest-episode", payload, {
|
||||||
|
jobId: payload.queueId,
|
||||||
|
attempts: 3,
|
||||||
|
backoff: { type: "exponential", delay: 2000 },
|
||||||
|
});
|
||||||
|
return { id: job.id };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enqueue document ingestion job
|
||||||
|
*/
|
||||||
|
export async function enqueueIngestDocument(payload: {
|
||||||
|
body: z.infer<typeof IngestBodyRequest>;
|
||||||
|
userId: string;
|
||||||
|
workspaceId: string;
|
||||||
|
queueId: string;
|
||||||
|
}): Promise<{ id?: string; token?: string }> {
|
||||||
|
const provider = env.QUEUE_PROVIDER as QueueProvider;
|
||||||
|
|
||||||
|
if (provider === "trigger") {
|
||||||
|
const { ingestDocumentTask } = await import(
|
||||||
|
"~/trigger/ingest/ingest-document"
|
||||||
|
);
|
||||||
|
const handler = await ingestDocumentTask.trigger(payload, {
|
||||||
|
queue: "document-ingestion-queue",
|
||||||
|
concurrencyKey: payload.userId,
|
||||||
|
tags: [payload.userId, payload.queueId],
|
||||||
|
});
|
||||||
|
return { id: handler.id, token: handler.publicAccessToken };
|
||||||
|
} else {
|
||||||
|
// BullMQ
|
||||||
|
const { documentIngestQueue } = await import("~/bullmq/queues");
|
||||||
|
const job = await documentIngestQueue.add("ingest-document", payload, {
|
||||||
|
jobId: payload.queueId,
|
||||||
|
attempts: 3,
|
||||||
|
backoff: { type: "exponential", delay: 2000 },
|
||||||
|
});
|
||||||
|
|
||||||
|
return { id: job.id };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enqueue conversation title creation job
|
||||||
|
*/
|
||||||
|
export async function enqueueCreateConversationTitle(
|
||||||
|
payload: CreateConversationTitlePayload,
|
||||||
|
): Promise<{ id?: string }> {
|
||||||
|
const provider = env.QUEUE_PROVIDER as QueueProvider;
|
||||||
|
|
||||||
|
if (provider === "trigger") {
|
||||||
|
const { createConversationTitle } = await import(
|
||||||
|
"~/trigger/conversation/create-conversation-title"
|
||||||
|
);
|
||||||
|
const handler = await createConversationTitle.trigger(payload);
|
||||||
|
return { id: handler.id };
|
||||||
|
} else {
|
||||||
|
// BullMQ
|
||||||
|
const { conversationTitleQueue } = await import("~/bullmq/queues");
|
||||||
|
const job = await conversationTitleQueue.add(
|
||||||
|
"create-conversation-title",
|
||||||
|
payload,
|
||||||
|
{
|
||||||
|
attempts: 3,
|
||||||
|
backoff: { type: "exponential", delay: 2000 },
|
||||||
|
},
|
||||||
|
);
|
||||||
|
return { id: job.id };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enqueue session compaction job
|
||||||
|
*/
|
||||||
|
export async function enqueueSessionCompaction(
|
||||||
|
payload: SessionCompactionPayload,
|
||||||
|
): Promise<{ id?: string }> {
|
||||||
|
const provider = env.QUEUE_PROVIDER as QueueProvider;
|
||||||
|
|
||||||
|
if (provider === "trigger") {
|
||||||
|
const { sessionCompactionTask } = await import(
|
||||||
|
"~/trigger/session/session-compaction"
|
||||||
|
);
|
||||||
|
const handler = await sessionCompactionTask.trigger(payload);
|
||||||
|
return { id: handler.id };
|
||||||
|
} else {
|
||||||
|
// BullMQ
|
||||||
|
const { sessionCompactionQueue } = await import("~/bullmq/queues");
|
||||||
|
const job = await sessionCompactionQueue.add(
|
||||||
|
"session-compaction",
|
||||||
|
payload,
|
||||||
|
{
|
||||||
|
attempts: 3,
|
||||||
|
backoff: { type: "exponential", delay: 2000 },
|
||||||
|
},
|
||||||
|
);
|
||||||
|
return { id: job.id };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enqueue space assignment job
|
||||||
|
*/
|
||||||
|
export async function enqueueSpaceAssignment(
|
||||||
|
payload: SpaceAssignmentPayload,
|
||||||
|
): Promise<{ id?: string }> {
|
||||||
|
const provider = env.QUEUE_PROVIDER as QueueProvider;
|
||||||
|
|
||||||
|
if (provider === "trigger") {
|
||||||
|
const { triggerSpaceAssignment } = await import(
|
||||||
|
"~/trigger/spaces/space-assignment"
|
||||||
|
);
|
||||||
|
const handler = await triggerSpaceAssignment(payload);
|
||||||
|
return { id: handler.id };
|
||||||
|
} else {
|
||||||
|
// BullMQ
|
||||||
|
const { spaceAssignmentQueue } = await import("~/bullmq/queues");
|
||||||
|
const job = await spaceAssignmentQueue.add("space-assignment", payload, {
|
||||||
|
jobId: `space-assignment-${payload.userId}-${payload.mode}-${Date.now()}`,
|
||||||
|
attempts: 3,
|
||||||
|
backoff: { type: "exponential", delay: 2000 },
|
||||||
|
});
|
||||||
|
return { id: job.id };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enqueue space summary job
|
||||||
|
*/
|
||||||
|
export async function enqueueSpaceSummary(
|
||||||
|
payload: SpaceSummaryPayload,
|
||||||
|
): Promise<{ id?: string }> {
|
||||||
|
const provider = env.QUEUE_PROVIDER as QueueProvider;
|
||||||
|
|
||||||
|
if (provider === "trigger") {
|
||||||
|
const { triggerSpaceSummary } = await import(
|
||||||
|
"~/trigger/spaces/space-summary"
|
||||||
|
);
|
||||||
|
const handler = await triggerSpaceSummary(payload);
|
||||||
|
return { id: handler.id };
|
||||||
|
} else {
|
||||||
|
// BullMQ
|
||||||
|
const { spaceSummaryQueue } = await import("~/bullmq/queues");
|
||||||
|
const job = await spaceSummaryQueue.add("space-summary", payload, {
|
||||||
|
jobId: `space-summary-${payload.spaceId}-${Date.now()}`,
|
||||||
|
attempts: 3,
|
||||||
|
backoff: { type: "exponential", delay: 2000 },
|
||||||
|
});
|
||||||
|
return { id: job.id };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enqueue BERT topic analysis job
|
||||||
|
*/
|
||||||
|
export async function enqueueBertTopicAnalysis(payload: {
|
||||||
|
userId: string;
|
||||||
|
workspaceId: string;
|
||||||
|
minTopicSize?: number;
|
||||||
|
nrTopics?: number;
|
||||||
|
}): Promise<{ id?: string }> {
|
||||||
|
const provider = env.QUEUE_PROVIDER as QueueProvider;
|
||||||
|
|
||||||
|
if (provider === "trigger") {
|
||||||
|
const { bertTopicAnalysisTask } = await import(
|
||||||
|
"~/trigger/bert/topic-analysis"
|
||||||
|
);
|
||||||
|
const handler = await bertTopicAnalysisTask.trigger(payload, {
|
||||||
|
queue: "bert-topic-analysis",
|
||||||
|
concurrencyKey: payload.userId,
|
||||||
|
tags: [payload.userId, "bert-analysis"],
|
||||||
|
});
|
||||||
|
return { id: handler.id };
|
||||||
|
} else {
|
||||||
|
// BullMQ
|
||||||
|
const { bertTopicQueue } = await import("~/bullmq/queues");
|
||||||
|
const job = await bertTopicQueue.add("topic-analysis", payload, {
|
||||||
|
jobId: `bert-${payload.userId}-${Date.now()}`,
|
||||||
|
attempts: 2, // Only 2 attempts for expensive operations
|
||||||
|
backoff: { type: "exponential", delay: 5000 },
|
||||||
|
});
|
||||||
|
return { id: job.id };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const isTriggerDeployment = () => {
|
||||||
|
return env.QUEUE_PROVIDER === "trigger";
|
||||||
|
};
|
||||||
@ -1,6 +1,14 @@
|
|||||||
import { clsx, type ClassValue } from "clsx"
|
import { clsx, type ClassValue } from "clsx";
|
||||||
import { twMerge } from "tailwind-merge"
|
import { twMerge } from "tailwind-merge";
|
||||||
|
|
||||||
export function cn(...inputs: ClassValue[]) {
|
export function cn(...inputs: ClassValue[]) {
|
||||||
return twMerge(clsx(inputs))
|
return twMerge(clsx(inputs));
|
||||||
|
}
|
||||||
|
|
||||||
|
export function formatString(input: string): string {
|
||||||
|
if (!input) return "";
|
||||||
|
return input
|
||||||
|
.split(" ")
|
||||||
|
.map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
|
||||||
|
.join(" ");
|
||||||
}
|
}
|
||||||
|
|||||||
@ -2,6 +2,8 @@ import type { Prisma, User } from "@core/database";
|
|||||||
import type { GoogleProfile } from "@coji/remix-auth-google";
|
import type { GoogleProfile } from "@coji/remix-auth-google";
|
||||||
import { prisma } from "~/db.server";
|
import { prisma } from "~/db.server";
|
||||||
import { env } from "~/env.server";
|
import { env } from "~/env.server";
|
||||||
|
import { runQuery } from "~/lib/neo4j.server";
|
||||||
|
import { trackFeatureUsage } from "~/services/telemetry.server";
|
||||||
export type { User } from "@core/database";
|
export type { User } from "@core/database";
|
||||||
|
|
||||||
type FindOrCreateMagicLink = {
|
type FindOrCreateMagicLink = {
|
||||||
@ -71,9 +73,16 @@ export async function findOrCreateMagicLinkUser(
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const isNewUser = !existingUser;
|
||||||
|
|
||||||
|
// Track new user registration
|
||||||
|
if (isNewUser) {
|
||||||
|
trackFeatureUsage("user_registered", user.id).catch(console.error);
|
||||||
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
user,
|
user,
|
||||||
isNewUser: !existingUser,
|
isNewUser,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -156,22 +165,29 @@ export async function findOrCreateGoogleUser({
|
|||||||
authIdentifier,
|
authIdentifier,
|
||||||
email,
|
email,
|
||||||
authenticationMethod: "GOOGLE",
|
authenticationMethod: "GOOGLE",
|
||||||
UserUsage: {
|
|
||||||
create: {
|
|
||||||
availableCredits: 200,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const isNewUser = !existingUser;
|
||||||
|
|
||||||
|
// Track new user registration
|
||||||
|
if (isNewUser) {
|
||||||
|
trackFeatureUsage("user_registered", user.id).catch(console.error);
|
||||||
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
user,
|
user,
|
||||||
isNewUser: !existingUser,
|
isNewUser,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function getUserById(id: User["id"]) {
|
export async function getUserById(id: User["id"]) {
|
||||||
const user = await prisma.user.findUnique({ where: { id } });
|
const user = await prisma.user.findUnique({
|
||||||
|
where: { id },
|
||||||
|
include: {
|
||||||
|
Workspace: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
if (!user) {
|
if (!user) {
|
||||||
return null;
|
return null;
|
||||||
@ -238,3 +254,45 @@ export async function grantUserCloudAccess({
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export async function deleteUser(id: User["id"]) {
|
||||||
|
// Get user to verify they exist
|
||||||
|
const user = await prisma.user.findUnique({
|
||||||
|
where: { id },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!user) {
|
||||||
|
throw new Error("User not found");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete all user-related nodes from the Neo4j knowledge graph
|
||||||
|
try {
|
||||||
|
// Delete all nodes (Episodes, Entities, Statements, Spaces, Documents, Clusters)
|
||||||
|
// and their relationships where userId matches
|
||||||
|
await runQuery(
|
||||||
|
`
|
||||||
|
MATCH (n {userId: $userId})
|
||||||
|
DETACH DELETE n
|
||||||
|
`,
|
||||||
|
{ userId: id }
|
||||||
|
);
|
||||||
|
console.log(`Deleted all graph nodes for user ${id}`);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Failed to delete graph nodes:", error);
|
||||||
|
// Continue with deletion even if graph cleanup fails
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete the user - cascade deletes will handle all related data:
|
||||||
|
// - Workspace (and all workspace-related data via cascade)
|
||||||
|
// - PersonalAccessToken
|
||||||
|
// - UserUsage
|
||||||
|
// - Conversations, ConversationHistory
|
||||||
|
// - IngestionRules
|
||||||
|
// - IntegrationAccounts
|
||||||
|
// - RecallLogs
|
||||||
|
// - WebhookConfigurations
|
||||||
|
// - All OAuth models
|
||||||
|
return prisma.user.delete({
|
||||||
|
where: { id },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|||||||
@ -1,5 +1,6 @@
|
|||||||
import { type Workspace } from "@core/database";
|
import { type Workspace } from "@core/database";
|
||||||
import { prisma } from "~/db.server";
|
import { prisma } from "~/db.server";
|
||||||
|
import { ensureBillingInitialized } from "~/services/billing.server";
|
||||||
import { sendEmail } from "~/services/email.server";
|
import { sendEmail } from "~/services/email.server";
|
||||||
import { logger } from "~/services/logger.service";
|
import { logger } from "~/services/logger.service";
|
||||||
import { SpaceService } from "~/services/space.server";
|
import { SpaceService } from "~/services/space.server";
|
||||||
@ -13,14 +14,20 @@ interface CreateWorkspaceDto {
|
|||||||
const spaceService = new SpaceService();
|
const spaceService = new SpaceService();
|
||||||
|
|
||||||
const profileRule = `
|
const profileRule = `
|
||||||
Store the user’s stable, non-sensitive identity and preference facts that improve personalization across assistants. Facts must be long-lived (expected validity ≥ 3 months) and broadly useful across contexts (not app-specific).
|
Purpose: Store my identity and preferences to improve personalization across assistants. It should be broadly useful across contexts (not app-specific).
|
||||||
Include (examples):
|
Include (examples):
|
||||||
• Preferred name, pronunciation, public handles (GitHub/Twitter/LinkedIn URLs), primary email domain
|
• Preferred name, pronunciation, public handles (GitHub/Twitter/LinkedIn URLs), primary email domain
|
||||||
• Timezone, locale, working hours, meeting preferences (async/sync bias, default duration)
|
• Timezone, locale, working hours, meeting preferences (async/sync bias, default duration)
|
||||||
• Role, team, company, office location (city-level only), seniority
|
• Role, team, company, office location (city-level only), seniority
|
||||||
• Tooling defaults (editor, ticketing system, repo host), keyboard layout, OS
|
• Tooling defaults (editor, ticketing system, repo host), keyboard layout, OS
|
||||||
• Communication preferences (tone, brevity vs. detail, summary-first)
|
• Communication preferences (tone, brevity vs. detail, summary-first)
|
||||||
Exclude: secrets/credentials; one-off or short-term states; health/financial/political/religious/sexual data; precise home address; raw event logs; app-specific analytics; anything the user did not explicitly consent to share.`;
|
Exclude:
|
||||||
|
• Sensitive: secrets, health/financial/political/religious/sexual data, precise address
|
||||||
|
• Temporary: one-off states, troubleshooting sessions, query results
|
||||||
|
• Context-specific: app behaviors, work conversations, project-specific preferences
|
||||||
|
• Meta: discussions about this memory system, AI architecture, system design
|
||||||
|
• Anything not explicitly consented to share
|
||||||
|
don't store anything the user did not explicitly consent to share.`;
|
||||||
|
|
||||||
export async function createWorkspace(
|
export async function createWorkspace(
|
||||||
input: CreateWorkspaceDto,
|
input: CreateWorkspaceDto,
|
||||||
@ -40,12 +47,10 @@ export async function createWorkspace(
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
await spaceService.createSpace({
|
await ensureBillingInitialized(workspace.id);
|
||||||
name: "Profile",
|
|
||||||
description: profileRule,
|
// Create default spaces
|
||||||
userId: input.userId,
|
await Promise.all([]);
|
||||||
workspaceId: workspace.id,
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const response = await sendEmail({ email: "welcome", to: user.email });
|
const response = await sendEmail({ email: "welcome", to: user.email });
|
||||||
|
|||||||
@ -25,7 +25,7 @@ import {
|
|||||||
type ToastMessage,
|
type ToastMessage,
|
||||||
} from "./models/message.server";
|
} from "./models/message.server";
|
||||||
import { env } from "./env.server";
|
import { env } from "./env.server";
|
||||||
import { getUser, getUserRemainingCount } from "./services/session.server";
|
import { getUser } from "./services/session.server";
|
||||||
import { usePostHog } from "./hooks/usePostHog";
|
import { usePostHog } from "./hooks/usePostHog";
|
||||||
import {
|
import {
|
||||||
AppContainer,
|
AppContainer,
|
||||||
@ -40,6 +40,8 @@ import {
|
|||||||
useTheme,
|
useTheme,
|
||||||
} from "remix-themes";
|
} from "remix-themes";
|
||||||
import clsx from "clsx";
|
import clsx from "clsx";
|
||||||
|
import { getUsageSummary } from "./services/billing.server";
|
||||||
|
import { Toaster } from "./components/ui/toaster";
|
||||||
|
|
||||||
export const links: LinksFunction = () => [{ rel: "stylesheet", href: styles }];
|
export const links: LinksFunction = () => [{ rel: "stylesheet", href: styles }];
|
||||||
|
|
||||||
@ -49,16 +51,19 @@ export const loader = async ({ request }: LoaderFunctionArgs) => {
|
|||||||
const { getTheme } = await themeSessionResolver(request);
|
const { getTheme } = await themeSessionResolver(request);
|
||||||
|
|
||||||
const posthogProjectKey = env.POSTHOG_PROJECT_KEY;
|
const posthogProjectKey = env.POSTHOG_PROJECT_KEY;
|
||||||
|
const telemetryEnabled = env.TELEMETRY_ENABLED;
|
||||||
const user = await getUser(request);
|
const user = await getUser(request);
|
||||||
const usage = await getUserRemainingCount(request);
|
const usageSummary = await getUsageSummary(user?.Workspace?.id as string);
|
||||||
|
|
||||||
return typedjson(
|
return typedjson(
|
||||||
{
|
{
|
||||||
user: user,
|
user: user,
|
||||||
availableCredits: usage?.availableCredits ?? 0,
|
availableCredits: usageSummary?.credits.available ?? 0,
|
||||||
|
totalCredits: usageSummary?.credits.monthly ?? 0,
|
||||||
toastMessage,
|
toastMessage,
|
||||||
theme: getTheme(),
|
theme: getTheme(),
|
||||||
posthogProjectKey,
|
posthogProjectKey,
|
||||||
|
telemetryEnabled,
|
||||||
appEnv: env.APP_ENV,
|
appEnv: env.APP_ENV,
|
||||||
appOrigin: env.APP_ORIGIN,
|
appOrigin: env.APP_ORIGIN,
|
||||||
},
|
},
|
||||||
@ -110,8 +115,10 @@ export function ErrorBoundary() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function App() {
|
function App() {
|
||||||
const { posthogProjectKey } = useTypedLoaderData<typeof loader>();
|
const { posthogProjectKey, telemetryEnabled } =
|
||||||
usePostHog(posthogProjectKey);
|
useTypedLoaderData<typeof loader>();
|
||||||
|
|
||||||
|
usePostHog(posthogProjectKey, telemetryEnabled);
|
||||||
const [theme] = useTheme();
|
const [theme] = useTheme();
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@ -124,6 +131,7 @@ function App() {
|
|||||||
</head>
|
</head>
|
||||||
<body className="bg-background-2 h-[100vh] h-full w-[100vw] overflow-hidden font-sans">
|
<body className="bg-background-2 h-[100vh] h-full w-[100vw] overflow-hidden font-sans">
|
||||||
<Outlet />
|
<Outlet />
|
||||||
|
<Toaster />
|
||||||
<ScrollRestoration />
|
<ScrollRestoration />
|
||||||
|
|
||||||
<Scripts />
|
<Scripts />
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user