diff --git a/.env.example b/.env.example
index c25d111..754d9b0 100644
--- a/.env.example
+++ b/.env.example
@@ -12,7 +12,7 @@ POSTGRES_PASSWORD=docker
 POSTGRES_DB=core
 
 LOGIN_ORIGIN=http://localhost:3033
-DATABASE_URL="postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB}?schema=core"
+DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB}?schema=core"
 # This sets the URL used for direct connections to the database and should only be needed in limited circumstances
 # See: https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#fields:~:text=the%20shadow%20database.-,directUrl,-No
@@ -22,6 +22,8 @@ REMIX_APP_PORT=3033
 APP_ENV=production
 NODE_ENV=${APP_ENV}
 APP_ORIGIN=http://localhost:3033
+API_BASE_URL=${APP_ORIGIN}
+
 SESSION_SECRET=27192e6432564f4788d55c15131bd5ac
 ENCRYPTION_KEY=27192e6432564f4788d55c15131bd5ac
@@ -48,8 +50,8 @@ MAGIC_LINK_SECRET=27192e6432564f4788d55c15131bd5ac
 NEO4J_AUTH=neo4j/27192e6432564f4788d55c15131bd5ac
 OLLAMA_URL=http://ollama:11434
-EMBEDDING_MODEL=GPT41
-MODEL=GPT41
+EMBEDDING_MODEL=text-embedding-3-small
+MODEL=gpt-4.1-2025-04-14
 
 ## Trigger ##
 TRIGGER_PROJECT_ID=
diff --git a/apps/webapp/app/components/logs/logs-filters.tsx b/apps/webapp/app/components/logs/logs-filters.tsx
index f64dd13..4d391f5 100644
--- a/apps/webapp/app/components/logs/logs-filters.tsx
+++ b/apps/webapp/app/components/logs/logs-filters.tsx
@@ -51,7 +51,7 @@ export function LogsFilters({
   const handleBack = () => setStep("main");
 
   return (
-
+
{
diff --git a/apps/webapp/app/components/logs/virtual-logs-list.tsx b/apps/webapp/app/components/logs/virtual-logs-list.tsx
index 3525287..39d5dc1 100644
--- a/apps/webapp/app/components/logs/virtual-logs-list.tsx
+++ b/apps/webapp/app/components/logs/virtual-logs-list.tsx
@@ -153,7 +153,7 @@ export function VirtualLogsList({
   const itemCount = hasMore ? logs.length + 1 : logs.length;
 
   return (
-
+
         {({ width, height: autoHeight }) => (
+
           {logs.length === 0 ? (
-
+
diff --git a/apps/webapp/app/routes/home.logs.all.tsx b/apps/webapp/app/routes/home.logs.all.tsx
index 2074ef9..8ab7d83 100644
--- a/apps/webapp/app/routes/home.logs.all.tsx
+++ b/apps/webapp/app/routes/home.logs.all.tsx
@@ -45,7 +45,7 @@ export default function LogsAll() {
           },
         ]}
       />
-
+
       {isInitialLoad ? (
         <>
           {" "}
@@ -64,10 +64,10 @@ export default function LogsAll() {
         />
       )}
       {/* Logs List */}
-
+
         {logs.length === 0 ? (
-
-
+
+

diff --git a/apps/webapp/app/routes/login.magic.tsx b/apps/webapp/app/routes/login.magic.tsx
index 1c836c0..f54ca44 100644
--- a/apps/webapp/app/routes/login.magic.tsx
+++ b/apps/webapp/app/routes/login.magic.tsx
@@ -158,7 +158,8 @@ export default function LoginMagicLinkPage() {
} + confirmButton={
diff --git a/apps/webapp/app/routes/oauth.authorize.tsx b/apps/webapp/app/routes/oauth.authorize.tsx
index 4a95168..cb03a99 100644
--- a/apps/webapp/app/routes/oauth.authorize.tsx
+++ b/apps/webapp/app/routes/oauth.authorize.tsx
@@ -199,13 +199,13 @@ export default function OAuthAuthorize() {

Permissions

-            {params.scope?.split(",").map((scope, index, arr) => {
+            {params.scope?.split(" ").map((scope, index, arr) => {
               const isFirst = index === 0;
               const isLast = index === arr.length - 1;
               return (
  • {getIcon(scope)}
    diff --git a/apps/webapp/app/services/oauth/oauth.server.ts b/apps/webapp/app/services/oauth/oauth.server.ts index 7ced15b..0980729 100644 --- a/apps/webapp/app/services/oauth/oauth.server.ts +++ b/apps/webapp/app/services/oauth/oauth.server.ts @@ -278,14 +278,14 @@ export async function getRedirectURLForMCP( const spec = integrationDefinition.spec as any; - if (!spec.mcpAuth) { + if (!spec.mcp) { throw new Error("MCP auth configuration not found for this integration"); } - const { serverUrl, transportStrategy } = spec.mcpAuth; + const { url, transportStrategy } = spec.mcp; const authClient = createMCPAuthClient({ - serverUrl, + serverUrl: url, transportStrategy: transportStrategy || "sse-first", redirectUrl: MCP_CALLBACK_URL, }); diff --git a/apps/webapp/app/services/oauth2.server.ts b/apps/webapp/app/services/oauth2.server.ts index 08f345e..b271ee0 100644 --- a/apps/webapp/app/services/oauth2.server.ts +++ b/apps/webapp/app/services/oauth2.server.ts @@ -83,6 +83,7 @@ export class OAuth2Service { // Validate redirect URI validateRedirectUri(client: any, redirectUri: string): boolean { + console.log(redirectUri); const allowedUris = client.redirectUris .split(",") .map((uri: string) => uri.trim()); diff --git a/apps/webapp/app/utils/oauth2-middleware.ts b/apps/webapp/app/utils/oauth2-middleware.ts index 2131262..29c4c87 100644 --- a/apps/webapp/app/utils/oauth2-middleware.ts +++ b/apps/webapp/app/utils/oauth2-middleware.ts @@ -24,11 +24,14 @@ export interface OAuth2Context { export async function requireOAuth2(request: Request): Promise { const authHeader = request.headers.get("authorization"); - + if (!authHeader || !authHeader.startsWith("Bearer ")) { throw json( - { error: "invalid_token", error_description: "Missing or invalid authorization header" }, - { status: 401 } + { + error: "invalid_token", + error_description: "Missing or invalid authorization header", + }, + { status: 401 }, ); } @@ -36,7 +39,7 @@ export async function requireOAuth2(request: Request): Promise { try { const accessToken = await oauth2Service.validateAccessToken(token); - + return { user: { id: accessToken.user.id, @@ -59,13 +62,18 @@ export async function requireOAuth2(request: Request): Promise { }; } catch (error) { throw json( - { error: "invalid_token", error_description: "Invalid or expired access token" }, - { status: 401 } + { + error: "invalid_token", + error_description: "Invalid or expired access token", + }, + { status: 401 }, ); } } -export async function getOAuth2Context(request: Request): Promise { +export async function getOAuth2Context( + request: Request, +): Promise { try { return await requireOAuth2(request); } catch (error) { @@ -73,20 +81,31 @@ export async function getOAuth2Context(request: Request): Promise ({ - DATABASE_URL: process.env.DATABASE_URL as string, + ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY as string, API_BASE_URL: process.env.API_BASE_URL as string, + DATABASE_URL: process.env.DATABASE_URL as string, + EMBEDDING_MODEL: process.env.EMBEDDING_MODEL as string, + ENCRYPTION_KEY: process.env.ENCRYPTION_KEY as string, + MODEL: process.env.MODEL ?? 
"gpt-4.1-2025-04-14", + NEO4J_PASSWORD: process.env.NEO4J_PASSWORD as string, + NEO4J_URI: process.env.NEO4J_URI as string, + NEO4J_USERNAME: process.env.NEO4J_USERNAME as string, + OPENAI_API_KEY: process.env.OPENAI_API_KEY as string, })), prismaExtension({ schema: "prisma/schema.prisma", diff --git a/docker-compose.yaml b/docker-compose.yaml index ee17774..628b027 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -40,7 +40,7 @@ services: postgres: container_name: core-postgres - image: redplanethq/postgres:0.1.0 + image: redplanethq/postgres:0.1.2 environment: - POSTGRES_USER=${POSTGRES_USER} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} diff --git a/integrations/linear/spec.json b/integrations/linear/spec.json index 9dd6b86..fa70852 100644 --- a/integrations/linear/spec.json +++ b/integrations/linear/spec.json @@ -12,9 +12,9 @@ "label": "Linear API Key" } }, - "mcpAuth": { - "serverUrl": "https://mcp.linear.app/sse", - "transportStrategy": "sse-first", - "needsSeparateAuth": true + "mcp": { + "type": "http", + "url": "https://mcp.linear.app/mcp", + "needsAuth": true } } \ No newline at end of file diff --git a/integrations/slack/spec.json b/integrations/slack/spec.json index 34a46c3..9a39bd5 100644 --- a/integrations/slack/spec.json +++ b/integrations/slack/spec.json @@ -4,12 +4,11 @@ "description": "Connect your workspace to Slack. Run your workflows from slack bookmarks", "icon": "slack", "mcp": { + "type": "stdio", "command": "npx", - "args": ["-y", "@modelcontextprotocol/server-slack"], + "args": [ "-y", "@modelcontextprotocol/server-slack" ], "env": { - "SLACK_BOT_TOKEN": "${config:access_token}", - "SLACK_TEAM_ID": "${config:team_id}", - "SLACK_CHANNEL_IDS": "${config:channel_ids}" + "SLACK_BOT_TOKEN": "${config:access_token}" } }, "auth": { diff --git a/packages/core-cli/package.json b/packages/core-cli/package.json index f2c905e..72c35dc 100644 --- a/packages/core-cli/package.json +++ b/packages/core-cli/package.json @@ -1,6 +1,6 @@ { "name": "@redplanethq/core", - "version": "0.1.1", + "version": "0.1.2", "description": "A Command-Line Interface for Core", "type": "module", "license": "MIT", @@ -89,6 +89,7 @@ "commander": "^9.4.1", "defu": "^6.1.4", "dotenv": "^16.4.5", + "dotenv-expand": "^12.0.2", "esbuild": "^0.23.0", "eventsource": "^3.0.2", "evt": "^2.4.13", diff --git a/packages/core-cli/src/commands/init.ts b/packages/core-cli/src/commands/init.ts index 87b34e8..ac45ab0 100644 --- a/packages/core-cli/src/commands/init.ts +++ b/packages/core-cli/src/commands/init.ts @@ -1,10 +1,11 @@ import { intro, outro, text, confirm, spinner, note, log } from "@clack/prompts"; import { fileExists, updateEnvFile } from "../utils/file.js"; import { checkPostgresHealth } from "../utils/docker.js"; -import { executeDockerCommandInteractive } from "../utils/docker-interactive.js"; +import { executeCommandInteractive } from "../utils/docker-interactive.js"; import { printCoreBrainLogo } from "../utils/ascii.js"; import { setupEnvFile } from "../utils/env.js"; import { hasTriggerConfig } from "../utils/env-checker.js"; +import { getDockerCompatibleEnvVars } from "../utils/env-docker.js"; import path from "path"; export async function initCommand() { @@ -14,20 +15,27 @@ export async function initCommand() { intro("πŸš€ Core Development Environment Setup"); // Step 1: Confirm this is the Core repository - note("Please ensure you have:\nβ€’ Docker and Docker Compose installed\nβ€’ Git installed\nβ€’ pnpm package manager installed\nβ€’ You are in the Core repository directory", "πŸ“‹ 
Prerequisites"); - + note( + "Please ensure you have:\nβ€’ Docker and Docker Compose installed\nβ€’ Git installed\nβ€’ pnpm package manager installed\nβ€’ You are in the Core repository directory", + "πŸ“‹ Prerequisites" + ); + const isCoreRepo = await confirm({ message: "Are you currently in the Core repository directory?", }); if (!isCoreRepo) { - note("Please clone the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run 'core init' again.", "πŸ“₯ Clone Repository"); + note( + "Please clone the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run 'core init' again.", + "πŸ“₯ Clone Repository" + ); outro("❌ Setup cancelled. Please navigate to the Core repository first."); process.exit(1); } const rootDir = process.cwd(); const triggerDir = path.join(rootDir, "trigger"); + const webappDir = path.join(rootDir, "apps", "webapp"); try { // Step 2: Setup .env file in root @@ -51,7 +59,7 @@ export async function initCommand() { // Step 3: Docker compose up -d in root try { - await executeDockerCommandInteractive("docker compose up -d", { + await executeCommandInteractive("docker compose up -d", { cwd: rootDir, message: "Starting Docker containers in root...", showOutput: true, @@ -103,7 +111,7 @@ export async function initCommand() { // Step 6: Docker compose up for trigger try { - await executeDockerCommandInteractive("docker compose up -d", { + await executeCommandInteractive("docker compose up -d", { cwd: triggerDir, message: "Starting Trigger.dev containers...", showOutput: true, @@ -175,6 +183,16 @@ export async function initCommand() { }, }); + const openaiApiKey = await text({ + message: "Enter your OpenAI API Key:", + validate: (value) => { + if (!value || value.length === 0) { + return "OpenAI API Key is required"; + } + return; + }, + }); + // Step 11: Update .env with project details const s6 = spinner(); s6.start("Updating .env with Trigger.dev configuration..."); @@ -182,6 +200,7 @@ export async function initCommand() { try { await updateEnvFile(envPath, "TRIGGER_PROJECT_ID", projectId as string); await updateEnvFile(envPath, "TRIGGER_SECRET_KEY", secretKey as string); + await updateEnvFile(envPath, "OPENAI_API_KEY", openaiApiKey as string); s6.stop("βœ… Updated .env with Trigger.dev configuration"); } catch (error: any) { s6.stop("❌ Failed to update .env file"); @@ -190,13 +209,13 @@ export async function initCommand() { // Step 12: Restart root docker-compose with new configuration try { - await executeDockerCommandInteractive("docker compose down", { + await executeCommandInteractive("docker compose down", { cwd: rootDir, message: "Stopping Core services...", showOutput: true, }); - await executeDockerCommandInteractive("docker compose up -d", { + await executeCommandInteractive("docker compose up -d", { cwd: rootDir, message: "Starting Core services with new Trigger.dev configuration...", showOutput: true, @@ -226,10 +245,10 @@ export async function initCommand() { const dockerRegistryPassword = getEnvValue("DOCKER_REGISTRY_PASSWORD"); log.info( - `docker login ${dockerRegistryUrl} -u ${dockerRegistryUsername} -p ${dockerRegistryPassword}` + `docker login -u ${dockerRegistryUsername} -p ${dockerRegistryPassword} ${dockerRegistryUrl} ` ); } catch (error) { - log.info("docker login -u -p "); + log.info("docker login -u -p "); } const dockerLoginConfirmed = await confirm({ @@ -249,8 +268,8 @@ export async function initCommand() { try { // Login to trigger.dev CLI - await 
executeDockerCommandInteractive( - "npx -y trigger.dev@v4-beta login -a http://localhost:8030", + await executeCommandInteractive( + "npx -y trigger.dev@4.0.0-v4-beta.22 login -a http://localhost:8030", { cwd: rootDir, message: "Logging in to Trigger.dev CLI...", @@ -258,11 +277,27 @@ export async function initCommand() { } ); - // Deploy trigger tasks - await executeDockerCommandInteractive("pnpm trigger:deploy", { + await executeCommandInteractive("pnpm install", { cwd: rootDir, + message: "Running package installation", + showOutput: true, + }); + + await executeCommandInteractive("pnpm build --filter=@core/types --filter=@core/database", { + cwd: rootDir, + message: "Building @core/types and @core/database with turbo...", + showOutput: true, + }); + + // Deploy trigger tasks + const envVars = await getDockerCompatibleEnvVars(rootDir); + + console.log(envVars); + await executeCommandInteractive("pnpm run trigger:deploy", { + cwd: webappDir, message: "Deploying Trigger.dev tasks...", showOutput: true, + env: envVars, }); log.success("Trigger.dev tasks deployed successfully!"); diff --git a/packages/core-cli/src/commands/start.ts b/packages/core-cli/src/commands/start.ts index 2d061ae..00cbbef 100644 --- a/packages/core-cli/src/commands/start.ts +++ b/packages/core-cli/src/commands/start.ts @@ -1,50 +1,55 @@ -import { intro, outro, note, log, confirm } from '@clack/prompts'; -import { executeDockerCommandInteractive } from '../utils/docker-interactive.js'; -import { printCoreBrainLogo } from '../utils/ascii.js'; -import path from 'path'; +import { intro, outro, note, log, confirm } from "@clack/prompts"; +import { executeCommandInteractive } from "../utils/docker-interactive.js"; +import { printCoreBrainLogo } from "../utils/ascii.js"; +import path from "path"; export async function startCommand() { // Display the CORE brain logo printCoreBrainLogo(); - - intro('πŸš€ Starting Core Development Environment'); + + intro("πŸš€ Starting Core Development Environment"); // Step 1: Confirm this is the Core repository const isCoreRepo = await confirm({ - message: 'Are you currently in the Core repository directory?', + message: "Are you currently in the Core repository directory?", }); if (!isCoreRepo) { - note('Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core start" again.', 'πŸ“₯ Core Repository Required'); - outro('❌ Please navigate to the Core repository first.'); + note( + 'Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core start" again.', + "πŸ“₯ Core Repository Required" + ); + outro("❌ Please navigate to the Core repository first."); process.exit(1); } const rootDir = process.cwd(); - const triggerDir = path.join(rootDir, 'trigger'); + const triggerDir = path.join(rootDir, "trigger"); try { // Start main services - await executeDockerCommandInteractive('docker compose up -d', { + await executeCommandInteractive("docker compose up -d", { cwd: rootDir, - message: 'Starting Core services...', - showOutput: true + message: "Starting Core services...", + showOutput: true, }); // Start trigger services - await executeDockerCommandInteractive('docker compose up -d', { + await executeCommandInteractive("docker compose up -d", { cwd: triggerDir, - message: 'Starting Trigger.dev services...', - showOutput: true + message: "Starting Trigger.dev services...", + showOutput: true, }); // Final success message - outro('πŸŽ‰ Core Development Environment 
Started!'); - note('β€’ Core Application: http://localhost:3033\nβ€’ Trigger.dev: http://localhost:8030\nβ€’ PostgreSQL: localhost:5432', '🌐 Your services are now running'); - log.success('Happy coding!'); - + outro("πŸŽ‰ Core Development Environment Started!"); + note( + "β€’ Core Application: http://localhost:3033\nβ€’ Trigger.dev: http://localhost:8030\nβ€’ PostgreSQL: localhost:5432", + "🌐 Your services are now running" + ); + log.success("Happy coding!"); } catch (error: any) { outro(`❌ Failed to start services: ${error.message}`); process.exit(1); } -} \ No newline at end of file +} diff --git a/packages/core-cli/src/commands/stop.ts b/packages/core-cli/src/commands/stop.ts index aa1b2a6..111222b 100644 --- a/packages/core-cli/src/commands/stop.ts +++ b/packages/core-cli/src/commands/stop.ts @@ -1,50 +1,52 @@ -import { intro, outro, log, confirm, note } from '@clack/prompts'; -import { executeDockerCommandInteractive } from '../utils/docker-interactive.js'; -import { printCoreBrainLogo } from '../utils/ascii.js'; -import path from 'path'; +import { intro, outro, log, confirm, note } from "@clack/prompts"; +import { executeCommandInteractive } from "../utils/docker-interactive.js"; +import { printCoreBrainLogo } from "../utils/ascii.js"; +import path from "path"; export async function stopCommand() { // Display the CORE brain logo printCoreBrainLogo(); - - intro('πŸ›‘ Stopping Core Development Environment'); + + intro("πŸ›‘ Stopping Core Development Environment"); // Step 1: Confirm this is the Core repository const isCoreRepo = await confirm({ - message: 'Are you currently in the Core repository directory?', + message: "Are you currently in the Core repository directory?", }); if (!isCoreRepo) { - note('Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core stop" again.', 'πŸ“₯ Core Repository Required'); - outro('❌ Please navigate to the Core repository first.'); + note( + 'Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core stop" again.', + "πŸ“₯ Core Repository Required" + ); + outro("❌ Please navigate to the Core repository first."); process.exit(1); } const rootDir = process.cwd(); - const triggerDir = path.join(rootDir, 'trigger'); + const triggerDir = path.join(rootDir, "trigger"); try { // Stop trigger services first - await executeDockerCommandInteractive('docker compose down', { + await executeCommandInteractive("docker compose down", { cwd: triggerDir, - message: 'Stopping Trigger.dev services...', - showOutput: true + message: "Stopping Trigger.dev services...", + showOutput: true, }); // Stop main services - await executeDockerCommandInteractive('docker compose down', { + await executeCommandInteractive("docker compose down", { cwd: rootDir, - message: 'Stopping Core services...', - showOutput: true + message: "Stopping Core services...", + showOutput: true, }); // Final success message - outro('πŸŽ‰ Core Development Environment Stopped!'); - log.success('All services have been stopped.'); + outro("πŸŽ‰ Core Development Environment Stopped!"); + log.success("All services have been stopped."); log.info('Run "core start" to start services again.'); - } catch (error: any) { outro(`❌ Failed to stop services: ${error.message}`); process.exit(1); } -} \ No newline at end of file +} diff --git a/packages/core-cli/src/utils/docker-interactive.ts b/packages/core-cli/src/utils/docker-interactive.ts index e589561..2525026 100644 --- 
a/packages/core-cli/src/utils/docker-interactive.ts +++ b/packages/core-cli/src/utils/docker-interactive.ts @@ -1,46 +1,45 @@ -import { spawn, ChildProcess } from 'child_process'; -import { spinner } from '@clack/prompts'; +import { spawn, ChildProcess } from "child_process"; +import { spinner } from "@clack/prompts"; -export interface DockerCommandOptions { +export interface CommandOptions { cwd: string; message: string; showOutput?: boolean; + env?: Record; } -export function executeDockerCommandInteractive( - command: string, - options: DockerCommandOptions -): Promise { +export function executeCommandInteractive(command: string, options: CommandOptions): Promise { return new Promise((resolve, reject) => { const s = spinner(); s.start(options.message); // Split command into parts - const parts = command.split(' '); + const parts = command.split(" "); const cmd = parts[0]; const args = parts.slice(1); - + if (!cmd) { - reject(new Error('Invalid command')); + reject(new Error("Invalid command")); return; } const child: ChildProcess = spawn(cmd, args, { cwd: options.cwd, - stdio: options.showOutput ? ['ignore', 'pipe', 'pipe'] : 'ignore', - detached: false + stdio: options.showOutput ? ["ignore", "pipe", "pipe"] : "ignore", + detached: false, + env: options.env ? { ...process.env, ...options.env } : { ...process.env }, }); - let output = ''; + let output = ""; // Handle stdout if (child.stdout && options.showOutput) { - child.stdout.on('data', (data: Buffer) => { + child.stdout.on("data", (data: Buffer) => { const text = data.toString(); output += text; - + // Update spinner with latest output line - const lines = text.trim().split('\n'); + const lines = text.trim().split("\n"); const lastLine = lines[lines.length - 1]; if (lastLine && lastLine.trim()) { s.message(`${options.message}\n${lastLine.trim()}`); @@ -50,12 +49,13 @@ export function executeDockerCommandInteractive( // Handle stderr if (child.stderr && options.showOutput) { - child.stderr.on('data', (data: Buffer) => { + child.stderr.on("data", (data: Buffer) => { const text = data.toString(); output += text; - + // console.log(text); + // Update spinner with error output - const lines = text.trim().split('\n'); + const lines = text.trim().split("\n"); const lastLine = lines[lines.length - 1]; if (lastLine && lastLine.trim()) { s.message(`${options.message}\n❌ ${lastLine.trim()}`); @@ -64,14 +64,14 @@ export function executeDockerCommandInteractive( } // Handle process exit - child.on('exit', (code: number | null) => { + child.on("exit", (code: number | null) => { if (code === 0) { - s.stop(`βœ… ${options.message.replace(/\.\.\.$/, '')} completed`); + s.stop(`βœ… ${options.message.replace(/\.\.\.$/, "")} completed`); resolve(); } else { - s.stop(`❌ ${options.message.replace(/\.\.\.$/, '')} failed (exit code: ${code})`); + s.stop(`❌ ${options.message.replace(/\.\.\.$/, "")} failed (exit code: ${code})`); if (options.showOutput && output) { - console.log('\nOutput:'); + console.log("\nOutput:"); console.log(output); } reject(new Error(`Command failed with exit code ${code}`)); @@ -79,30 +79,30 @@ export function executeDockerCommandInteractive( }); // Handle errors - child.on('error', (error: Error) => { - s.stop(`❌ ${options.message.replace(/\.\.\.$/, '')} failed`); + child.on("error", (error: Error) => { + s.stop(`❌ ${options.message.replace(/\.\.\.$/, "")} failed`); reject(error); }); // Handle Ctrl+C const handleSigint = () => { - s.stop(`⏹️ ${options.message.replace(/\.\.\.$/, '')} interrupted`); - child.kill('SIGTERM'); - + 
s.stop(`⏹️ ${options.message.replace(/\.\.\.$/, "")} interrupted`); + child.kill("SIGTERM"); + // Give the process time to clean up setTimeout(() => { if (child.killed === false) { - child.kill('SIGKILL'); + child.kill("SIGKILL"); } process.exit(130); // Standard exit code for SIGINT }, 5000); }; - process.on('SIGINT', handleSigint); + process.on("SIGINT", handleSigint); // Clean up event listener when done - child.on('exit', () => { - process.off('SIGINT', handleSigint); + child.on("exit", () => { + process.off("SIGINT", handleSigint); }); }); -} \ No newline at end of file +} diff --git a/packages/core-cli/src/utils/env-docker.ts b/packages/core-cli/src/utils/env-docker.ts new file mode 100644 index 0000000..0156f1b --- /dev/null +++ b/packages/core-cli/src/utils/env-docker.ts @@ -0,0 +1,49 @@ +import path from "path"; +import fs from "fs"; +import dotenv from "dotenv"; +import dotenvExpand from "dotenv-expand"; + +/** + * Reads environment variables from .env file and replaces localhost URLs with host.docker.internal + * for Docker container compatibility + */ +export async function getDockerCompatibleEnvVars(rootDir: string): Promise> { + const envPath = path.join(rootDir, ".env"); + + try { + // Use dotenv to parse and expand variables + + const envVarsExpand = + dotenvExpand.expand(dotenv.config({ path: envPath, processEnv: {} })).parsed || {}; + + console.log(JSON.stringify(envVarsExpand)); + const getEnvValue = (key: string): string => { + return envVarsExpand[key] || ""; + }; + + const replaceLocalhostWithDockerHost = (value: string): string => { + return value + .replace(/localhost/g, "host.docker.internal") + .replace(/127\.0\.0\.1/g, "host.docker.internal"); + }; + + // Get all required environment variables + const envVars = { + ANTHROPIC_API_KEY: getEnvValue("ANTHROPIC_API_KEY"), + API_BASE_URL: replaceLocalhostWithDockerHost(getEnvValue("API_BASE_URL")), + DATABASE_URL: replaceLocalhostWithDockerHost(getEnvValue("DATABASE_URL")), + EMBEDDING_MODEL: getEnvValue("EMBEDDING_MODEL"), + ENCRYPTION_KEY: getEnvValue("ENCRYPTION_KEY"), + MODEL: getEnvValue("MODEL") || "gpt-4.1-2025-04-14", + NEO4J_PASSWORD: getEnvValue("NEO4J_PASSWORD"), + NEO4J_URI: replaceLocalhostWithDockerHost(getEnvValue("NEO4J_URI")), + NEO4J_USERNAME: getEnvValue("NEO4J_USERNAME"), + OPENAI_API_KEY: getEnvValue("OPENAI_API_KEY"), + TRIGGER_PROJECT_ID: getEnvValue("TRIGGER_PROJECT_ID"), + }; + + return envVars; + } catch (error) { + throw new Error(`Failed to read .env file: ${error}`); + } +} diff --git a/packages/database/package.json b/packages/database/package.json index 10f3958..5dac57f 100644 --- a/packages/database/package.json +++ b/packages/database/package.json @@ -21,7 +21,7 @@ "db:studio": "prisma studio", "db:reset": "prisma migrate reset", "typecheck": "tsc --noEmit", - "build": "pnpm run clean && tsc --noEmit false --outDir dist --declaration", + "build": "pnpm run generate && pnpm run clean && tsc --noEmit false --outDir dist --declaration", "dev": "tsc --noEmit false --outDir dist --declaration --watch" } } \ No newline at end of file diff --git a/packages/mcp-proxy/src/core/mcp-remote-client.ts b/packages/mcp-proxy/src/core/mcp-remote-client.ts index 26f9975..ec093f6 100644 --- a/packages/mcp-proxy/src/core/mcp-remote-client.ts +++ b/packages/mcp-proxy/src/core/mcp-remote-client.ts @@ -1,6 +1,7 @@ import { Client } from "@modelcontextprotocol/sdk/client/index.js"; import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js"; import { StreamableHTTPClientTransport } from 
"@modelcontextprotocol/sdk/client/streamableHttp.js"; +import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js"; import { MCPRemoteClientConfig, AuthenticationResult, @@ -153,7 +154,7 @@ export function createMCPProxy( redirectUrl: string, transportStrategy: TransportStrategy = "sse-first", clientHeaders?: { sessionId?: string | null; lastEventId?: string | null } - ): Promise { + ): Promise { // Create auth provider with stored credentials using common factory const authProvider = await createAuthProviderForProxy(serverUrl, credentials, redirectUrl); @@ -173,9 +174,16 @@ export function createMCPProxy( } // Create transport based on strategy (don't start yet) - let transport: SSEClientTransport | StreamableHTTPClientTransport; + let transport: SSEClientTransport | StreamableHTTPClientTransport | StdioClientTransport; switch (transportStrategy) { + case "stdio": + // For stdio transport, serverUrl should contain the command to execute + // This is mainly for completeness - prefer using createMCPStdioProxy directly + throw new Error( + "Stdio transport not supported in createRemoteTransport. Use createMCPStdioProxy instead." + ); + case "sse-only": transport = new SSEClientTransport(url, { authProvider, @@ -227,6 +235,257 @@ export function createMCPProxy( } } +/** + * Creates an MCP proxy that forwards requests to a stdio process. + * Maintains a mapping of sessionId -> StdioClientTransport for reuse. + * If sessionId is provided, it is returned in the response header as mcp_session_id. + * @param request The incoming HTTP request + * @param command The command to execute for the stdio process + * @param args Arguments for the command + * @param options Optional configuration for the proxy + * @param sessionId Optional session id for transport reuse + * @returns Promise that resolves to the HTTP response + */ +// Track both the transport and its last used timestamp +type StdioTransportEntry = { + transport: StdioClientTransport; + lastUsed: number; // ms since epoch +}; + +const stdioTransports: Map = new Map(); + +/** + * Cleans up any stdio transports that have not been used in the last 5 minutes. + * Closes and removes them from the map. 
+ */ +function cleanupOldStdioTransports() { + const now = Date.now(); + const FIVE_MINUTES = 5 * 60 * 1000; + for (const [sessionId, entry] of stdioTransports.entries()) { + if (now - entry.lastUsed > FIVE_MINUTES) { + try { + entry.transport.close?.(); + } catch (err) { + // ignore + } + stdioTransports.delete(sessionId); + } + } +} + +export function createMCPStdioProxy( + request: Request, + command: string, + args?: string[], + options?: { + /** Enable debug logging */ + debug?: boolean; + /** Environment variables to pass to the process */ + env?: Record; + /** Custom header-to-environment variable mapping */ + headerMapping?: Record; + /** Optional session id for transport reuse */ + sessionId?: string; + } +): Promise { + return new Promise(async (resolve) => { + let bridge: any = null; + let serverTransport: StdioClientTransport | undefined; + let sessionId: string | undefined = + options?.sessionId || request.headers.get("Mcp-Session-Id") || undefined; + + // Clean up old transports before handling new connection + cleanupOldStdioTransports(); + + try { + // Extract headers from the incoming request and convert to environment variables + const env = createEnvironmentFromRequest( + request, + options?.env || {}, + options?.headerMapping || {} + ); + + // If sessionId is provided, try to reuse the transport + let entry: StdioTransportEntry | undefined; + if (sessionId) { + entry = stdioTransports.get(sessionId); + if (entry) { + serverTransport = entry.transport; + entry.lastUsed = Date.now(); + } + } + + // If no transport exists for this sessionId, create a new one and store it + if (!serverTransport) { + serverTransport = new StdioClientTransport({ + command, + args: args || [], + env, + }); + await serverTransport.start(); + if (sessionId) { + stdioTransports.set(sessionId, { + transport: serverTransport, + lastUsed: Date.now(), + }); + } + } + + // Create Remix transport (converts HTTP to MCP messages) + // We need to wrap resolve to inject the sessionId header if present + const resolveWithSessionId = (response: Response) => { + if (sessionId) { + // Clone the response and add the mcp_session_id header + const headers = new Headers(response.headers); + headers.set("mcp-session-id", sessionId); + resolve( + new Response(response.body, { + status: response.status, + statusText: response.statusText, + headers, + }) + ); + } else { + resolve(response); + } + }; + + const clientTransport = new RemixMCPTransport(request, resolveWithSessionId); + + // Bridge the transports + const bridgeOptions: any = { + debug: options?.debug || false, + onError: (error: Error, source: string) => { + console.error(`[MCP Stdio Bridge] ${source} error:`, error); + }, + }; + + if (options?.debug) { + bridgeOptions.onMessage = (direction: string, message: any) => { + console.log(`[MCP Stdio Bridge] ${direction}:`, message.method || message.id); + }; + } + + bridge = createMCPTransportBridge( + clientTransport as any, + serverTransport as any, + bridgeOptions + ); + + // Start only the client transport (server is already started) + await clientTransport.start(); + } catch (error) { + console.error("MCP Stdio Proxy Error:", error); + + if (bridge) { + bridge.close().catch(console.error); + } + + const errorMessage = error instanceof Error ? 
error.message : String(error); + // Always include mcp_session_id header if sessionId is present + const headers: Record = { "Content-Type": "application/json" }; + if (sessionId) { + headers["mcp-session-id"] = sessionId; + } + resolve( + new Response( + JSON.stringify({ + error: `Stdio proxy error: ${errorMessage}`, + }), + { + status: 500, + headers, + } + ) + ); + } + }); +} + +/** + * Creates environment variables from request headers + */ +function createEnvironmentFromRequest( + request: Request, + baseEnv: Record, + headerMapping: Record +): Record { + // Start with base environment (inherit safe environment variables) + const env: Record = { + ...getDefaultEnvironment(), + ...baseEnv, + }; + + // Add standard MCP headers as environment variables + const sessionId = request.headers.get("Mcp-Session-Id"); + const lastEventId = request.headers.get("Last-Event-Id"); + const contentType = request.headers.get("Content-Type"); + const userAgent = request.headers.get("User-Agent"); + + if (sessionId) { + env["MCP_SESSION_ID"] = sessionId; + } + if (lastEventId) { + env["MCP_LAST_EVENT_ID"] = lastEventId; + } + if (contentType) { + env["MCP_CONTENT_TYPE"] = contentType; + } + if (userAgent) { + env["MCP_USER_AGENT"] = userAgent; + } + + // Apply custom header-to-environment variable mapping + for (const [headerName, envVarName] of Object.entries(headerMapping)) { + const headerValue = request.headers.get(headerName); + if (headerValue) { + env[envVarName] = headerValue; + } + } + + return env; +} + +/** + * Returns a default environment object including only environment variables deemed safe to inherit. + */ +function getDefaultEnvironment(): Record { + const DEFAULT_INHERITED_ENV_VARS = + process.platform === "win32" + ? [ + "APPDATA", + "HOMEDRIVE", + "HOMEPATH", + "LOCALAPPDATA", + "PATH", + "PROCESSOR_ARCHITECTURE", + "SYSTEMDRIVE", + "SYSTEMROOT", + "TEMP", + "USERNAME", + "USERPROFILE", + ] + : ["HOME", "LOGNAME", "PATH", "SHELL", "TERM", "USER"]; + + const env: Record = {}; + + for (const key of DEFAULT_INHERITED_ENV_VARS) { + const value = process.env[key]; + if (value === undefined) { + continue; + } + + if (value.startsWith("()")) { + // Skip functions, which are a security risk. 
+ continue; + } + + env[key] = value; + } + + return env; +} + export class MCPAuthenticationClient { private serverUrlHash: string; private authProvider: NodeOAuthClientProvider | null = null; diff --git a/packages/mcp-proxy/src/index.ts b/packages/mcp-proxy/src/index.ts index 6ea643b..4a4e149 100644 --- a/packages/mcp-proxy/src/index.ts +++ b/packages/mcp-proxy/src/index.ts @@ -5,6 +5,7 @@ export * from "./types/index.js"; export { createMCPAuthClient, createMCPProxy, + createMCPStdioProxy, MCPAuthenticationClient, } from "./core/mcp-remote-client.js"; diff --git a/packages/mcp-proxy/src/types/remote-client.ts b/packages/mcp-proxy/src/types/remote-client.ts index 1e7f30e..977bfb5 100644 --- a/packages/mcp-proxy/src/types/remote-client.ts +++ b/packages/mcp-proxy/src/types/remote-client.ts @@ -51,7 +51,7 @@ export interface ProxyConnectionConfig { /** * Transport strategy options */ -export type TransportStrategy = "sse-only" | "http-only" | "sse-first" | "http-first"; +export type TransportStrategy = "sse-only" | "http-only" | "sse-first" | "http-first" | "stdio"; /** * Static OAuth client metadata diff --git a/packages/mcp-proxy/src/utils/mcp-transport-bridge.ts b/packages/mcp-proxy/src/utils/mcp-transport-bridge.ts index c71d575..e312e97 100644 --- a/packages/mcp-proxy/src/utils/mcp-transport-bridge.ts +++ b/packages/mcp-proxy/src/utils/mcp-transport-bridge.ts @@ -23,7 +23,7 @@ export function createMCPTransportBridge( // Forward messages from client to server clientTransport.onmessage = (message: any, extra: any) => { - console.log(JSON.stringify(message)); + console.log(message); log("[Clientβ†’Server]", message.method || message.id); onMessage?.("client-to-server", message); @@ -41,6 +41,7 @@ export function createMCPTransportBridge( // Forward messages from server to client serverTransport.onmessage = (message: any, extra: any) => { + console.log(message); console.log(JSON.stringify(message), JSON.stringify(extra)); log("[Serverβ†’Client]", message.method || message.id); onMessage?.("server-to-client", message); diff --git a/packages/mcp-proxy/src/utils/mcp-transport.ts b/packages/mcp-proxy/src/utils/mcp-transport.ts index f26d6f7..d9ed993 100644 --- a/packages/mcp-proxy/src/utils/mcp-transport.ts +++ b/packages/mcp-proxy/src/utils/mcp-transport.ts @@ -34,13 +34,24 @@ export class RemixMCPTransport implements Transport { throw new Error("Invalid JSON-RPC message"); } - // Emit the message to handler - if (this.onmessage) { - try { - this.onmessage(message); - } catch (error) { - if (this.onerror) { - this.onerror(error as Error); + if (message.method.includes("notifications")) { + this.send({}); + return; + } + + console.log(message, "message"); + + if (Object.keys(message).length === 0) { + this.send({}); + } else { + // Emit the message to handler + if (this.onmessage) { + try { + this.onmessage(message); + } catch (error) { + if (this.onerror) { + this.onerror(error as Error); + } } } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f3a8e6f..8069469 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -568,6 +568,9 @@ importers: dotenv: specifier: ^16.4.5 version: 16.5.0 + dotenv-expand: + specifier: ^12.0.2 + version: 12.0.2 esbuild: specifier: ^0.23.0 version: 0.23.1 @@ -6315,6 +6318,10 @@ packages: resolution: {integrity: sha512-GopVGCpVS1UKH75VKHGuQFqS1Gusej0z4FyQkPdwjil2gNIv+LNsqBlboOzpJFZKVT95GkCyWJbBSdFEFUWI2A==} engines: {node: '>=12'} + dotenv-expand@12.0.2: + resolution: {integrity: 
sha512-lXpXz2ZE1cea1gL4sz2Ipj8y4PiVjytYr3Ij0SWoms1PGxIv7m2CRKuRuCRtHdVuvM/hNJPMxt5PbhboNC4dPQ==}
+    engines: {node: '>=12'}
+
   dotenv@16.0.3:
     resolution: {integrity: sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ==}
     engines: {node: '>=12'}
 
@@ -17106,6 +17113,10 @@ snapshots:
 
   dotenv-expand@10.0.0: {}
 
+  dotenv-expand@12.0.2:
+    dependencies:
+      dotenv: 16.5.0
+
   dotenv@16.0.3: {}
 
   dotenv@16.4.7: {}
 
diff --git a/trigger/.env.example b/trigger/.env.example
index 1ab3687..78902ca 100644
--- a/trigger/.env.example
+++ b/trigger/.env.example
@@ -31,7 +31,7 @@ POSTGRES_USER=docker
 POSTGRES_PASSWORD=docker
 TRIGGER_DB=trigger
 
-DB_HOST=localhost
+DB_HOST=host.docker.internal
 DB_PORT=5432
 DB_SCHEMA=sigma
 
diff --git a/trigger/auth.htpasswd b/trigger/auth.htpasswd
new file mode 100644
index 0000000..62dbff9
--- /dev/null
+++ b/trigger/auth.htpasswd
@@ -0,0 +1 @@
+registry-user:$2y$05$6ingYqw0.3j13dxHY4w3neMSvKhF3pvRmc0AFifScWsVA9JpuLwNK
\ No newline at end of file
diff --git a/trigger/docker-compose.yaml b/trigger/docker-compose.yaml
index 558edb0..56bf19b 100644
--- a/trigger/docker-compose.yaml
+++ b/trigger/docker-compose.yaml
@@ -146,7 +146,7 @@ services:
       - webapp
     volumes:
       # registry-user:very-secure-indeed
-      - ../registry/auth.htpasswd:/auth/htpasswd:ro
+      - ./auth.htpasswd:/auth/htpasswd:ro
     environment:
       REGISTRY_AUTH: htpasswd
       REGISTRY_AUTH_HTPASSWD_REALM: Registry Realm
diff --git a/turbo.json b/turbo.json
index 9280d93..74d3a93 100644
--- a/turbo.json
+++ b/turbo.json
@@ -1,6 +1,5 @@
 {
   "$schema": "https://turborepo.com/schema.json",
-  "ui": "tui",
   "tasks": {
     "build": {
       "dependsOn": [ "^build" ],
@@ -52,6 +51,7 @@
         "SESSION_SECRET",
         "APP_ORIGIN",
         "LOGIN_ORIGIN",
+        "API_BASE_URL",
         "POSTHOG_PROJECT_KEY",
         "AUTH_GOOGLE_CLIENT_ID",
         "AUTH_GOOGLE_CLIENT_SECRET",