From 8d9ddcf375f736dc97c762da418e08239dd6a2e9 Mon Sep 17 00:00:00 2001
From: Harshith Mullapudi
Date: Tue, 22 Jul 2025 10:17:40 +0530
Subject: [PATCH] Fix: core cli to work with core repo (#26)

* Fix: core cli to work with core repo

* Fix: cli working and mcp proxy
---
 .env.example | 8 +-
 apps/webapp/app/components/logo/logo.tsx | 5 -
 .../app/components/logs/logs-filters.tsx | 2 +-
 .../app/components/logs/virtual-logs-list.tsx | 2 +-
 apps/webapp/app/entry.server.tsx | 28 +-
 apps/webapp/app/root.tsx | 5 -
 apps/webapp/app/routes/api.v1.mcp.$slug.tsx | 82 ++++--
 .../app/routes/api.v1.oauth.callback.mcp.tsx | 6 +-
 .../app/routes/home.integration.$slug.tsx | 4 +-
 apps/webapp/app/routes/home.logs.activity.tsx | 4 +-
 apps/webapp/app/routes/home.logs.all.tsx | 8 +-
 apps/webapp/app/routes/login.magic.tsx | 4 +-
 apps/webapp/app/routes/oauth.authorize.tsx | 4 +-
 .../app/services/integrationAccount.server.ts | 3 +
 .../webapp/app/services/oauth/oauth.server.ts | 6 +-
 apps/webapp/app/trigger/utils/mcp.ts | 192 +++++++++++++
 apps/webapp/app/utils/oauth2-middleware.ts | 47 +++-
 apps/webapp/app/utils/startup.ts | 34 +++
 apps/webapp/package.json | 4 +-
 apps/webapp/server.mjs | 1 +
 apps/webapp/trigger.config.ts | 10 +-
 apps/webapp/tsconfig.json | 3 +-
 docker-compose.yaml | 2 +-
 integrations/linear/spec.json | 8 +-
 integrations/slack/spec.json | 9 +-
 packages/core-cli/package.json | 3 +-
 packages/core-cli/src/commands/init.ts | 106 +++----
 packages/core-cli/src/commands/start.ts | 47 ++--
 packages/core-cli/src/commands/stop.ts | 42 +--
 .../core-cli/src/utils/docker-interactive.ts | 66 ++---
 packages/core-cli/src/utils/docker-login.ts | 63 +++++
 packages/core-cli/src/utils/env-docker.ts | 48 ++++
 packages/core-cli/src/utils/trigger-deploy.ts | 66 +++++
 packages/database/package.json | 2 +-
 .../mcp-proxy/src/core/mcp-remote-client.ts | 263 +++++++++++++++++-
 packages/mcp-proxy/src/index.ts | 1 +
 packages/mcp-proxy/src/types/remote-client.ts | 2 +-
 .../src/utils/mcp-transport-bridge.ts | 3 +-
 packages/mcp-proxy/src/utils/mcp-transport.ts | 25 +-
 pnpm-lock.yaml | 11 +
 trigger/.env.example | 2 +-
 trigger/auth.htpasswd | 1 +
 trigger/docker-compose.yaml | 2 +-
 turbo.json | 1 +
 44 files changed, 983 insertions(+), 252 deletions(-)
 create mode 100644 apps/webapp/app/utils/startup.ts
 create mode 100644 packages/core-cli/src/utils/docker-login.ts
 create mode 100644 packages/core-cli/src/utils/env-docker.ts
 create mode 100644 packages/core-cli/src/utils/trigger-deploy.ts
 create mode 100644 trigger/auth.htpasswd

diff --git a/.env.example b/.env.example
index c25d111..754d9b0 100644
--- a/.env.example
+++ b/.env.example
@@ -12,7 +12,7 @@ POSTGRES_PASSWORD=docker
POSTGRES_DB=core
LOGIN_ORIGIN=http://localhost:3033
-DATABASE_URL="postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB}?schema=core"
+DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB}?schema=core
# This sets the URL used for direct connections to the database and should only be needed in limited circumstances
# See: https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#fields:~:text=the%20shadow%20database.-,directUrl,-No
@@ -22,6 +22,8 @@ REMIX_APP_PORT=3033
APP_ENV=production
NODE_ENV=${APP_ENV}
APP_ORIGIN=http://localhost:3033
+API_BASE_URL=${APP_ORIGIN}
+
SESSION_SECRET=27192e6432564f4788d55c15131bd5ac
ENCRYPTION_KEY=27192e6432564f4788d55c15131bd5ac
@@ -48,8 +50,8 @@ MAGIC_LINK_SECRET=27192e6432564f4788d55c15131bd5ac
NEO4J_AUTH=neo4j/27192e6432564f4788d55c15131bd5ac
OLLAMA_URL=http://ollama:11434 -EMBEDDING_MODEL=GPT41 -MODEL=GPT41 +EMBEDDING_MODEL=text-embedding-3-small +MODEL=gpt-4.1-2025-04-14 ## Trigger ## TRIGGER_PROJECT_ID= diff --git a/apps/webapp/app/components/logo/logo.tsx b/apps/webapp/app/components/logo/logo.tsx index ecf4dd6..955489e 100644 --- a/apps/webapp/app/components/logo/logo.tsx +++ b/apps/webapp/app/components/logo/logo.tsx @@ -1,14 +1,9 @@ -import React from "react"; -import { Theme, useTheme } from "remix-themes"; - export interface LogoProps { width: number; height: number; } export default function StaticLogo({ width, height }: LogoProps) { - const [theme] = useTheme(); - return ( setStep("main"); return ( -
+
{ diff --git a/apps/webapp/app/components/logs/virtual-logs-list.tsx b/apps/webapp/app/components/logs/virtual-logs-list.tsx index 3525287..39d5dc1 100644 --- a/apps/webapp/app/components/logs/virtual-logs-list.tsx +++ b/apps/webapp/app/components/logs/virtual-logs-list.tsx @@ -153,7 +153,7 @@ export function VirtualLogsList({ const itemCount = hasMore ? logs.length + 1 : logs.length; return ( -
+
{({ width, height: autoHeight }) => ( { let shellRendered = false; @@ -64,7 +72,7 @@ function handleBotRequest( new Response(stream, { headers: responseHeaders, status: responseStatusCode, - }) + }), ); pipe(body); @@ -81,7 +89,7 @@ function handleBotRequest( console.error(error); } }, - } + }, ); setTimeout(abort, ABORT_DELAY); @@ -92,7 +100,7 @@ function handleBrowserRequest( request: Request, responseStatusCode: number, responseHeaders: Headers, - remixContext: EntryContext + remixContext: EntryContext, ) { return new Promise((resolve, reject) => { let shellRendered = false; @@ -114,7 +122,7 @@ function handleBrowserRequest( new Response(stream, { headers: responseHeaders, status: responseStatusCode, - }) + }), ); pipe(body); @@ -131,7 +139,7 @@ function handleBrowserRequest( console.error(error); } }, - } + }, ); setTimeout(abort, ABORT_DELAY); diff --git a/apps/webapp/app/root.tsx b/apps/webapp/app/root.tsx index 8028f39..d86b699 100644 --- a/apps/webapp/app/root.tsx +++ b/apps/webapp/app/root.tsx @@ -4,7 +4,6 @@ import { Outlet, Scripts, ScrollRestoration, - useLoaderData, } from "@remix-run/react"; import type { LinksFunction, @@ -41,7 +40,6 @@ import { useTheme, } from "remix-themes"; import clsx from "clsx"; -import { initNeo4jSchemaOnce } from "./lib/neo4j.server"; export const links: LinksFunction = () => [{ rel: "stylesheet", href: styles }]; @@ -50,8 +48,6 @@ export const loader = async ({ request }: LoaderFunctionArgs) => { const toastMessage = session.get("toastMessage") as ToastMessage; const { getTheme } = await themeSessionResolver(request); - await initNeo4jSchemaOnce(); - const posthogProjectKey = env.POSTHOG_PROJECT_KEY; return typedjson( @@ -138,7 +134,6 @@ function App() { // `specifiedTheme` is the stored theme in the session storage. // `themeAction` is the action name that's used to change the theme in the session storage. 
export default function AppWithProviders() { - const data = useLoaderData(); return ( diff --git a/apps/webapp/app/routes/api.v1.mcp.$slug.tsx b/apps/webapp/app/routes/api.v1.mcp.$slug.tsx index ca34659..43831d1 100644 --- a/apps/webapp/app/routes/api.v1.mcp.$slug.tsx +++ b/apps/webapp/app/routes/api.v1.mcp.$slug.tsx @@ -4,6 +4,9 @@ import { getIntegrationDefinitionWithSlug } from "~/services/integrationDefiniti import { proxyRequest } from "~/utils/proxy.server"; import { z } from "zod"; import { getIntegrationAccount } from "~/services/integrationAccount.server"; +import { createMCPStdioProxy } from "@core/mcp-proxy"; +import { randomUUID } from "node:crypto"; +import { configureStdioMCPEnvironment } from "~/trigger/utils/mcp"; export const integrationSlugSchema = z.object({ slug: z.string(), @@ -48,7 +51,7 @@ const { action, loader } = createActionApiRoute( const spec = integrationDefinition.spec as any; - if (!spec.mcpAuth) { + if (!spec.mcp) { return new Response( JSON.stringify({ error: "MCP auth configuration not found for this integration", @@ -60,7 +63,7 @@ const { action, loader } = createActionApiRoute( ); } - const { serverUrl } = spec.mcpAuth; + const { url, type } = spec.mcp; // Find the integration account for this user and integration const integrationAccount = await getIntegrationAccount( @@ -68,29 +71,66 @@ const { action, loader } = createActionApiRoute( authentication.userId, ); - const integrationConfig = - integrationAccount?.integrationConfiguration as any; + if (type === "http") { + const integrationConfig = + integrationAccount?.integrationConfiguration as any; - if (!integrationAccount || !integrationConfig || !integrationConfig.mcp) { - return new Response( - JSON.stringify({ - error: "No integration account with mcp config", - }), - { - status: 400, - headers: { "Content-Type": "application/json" }, - }, + if ( + !integrationAccount || + !integrationConfig || + !integrationConfig.mcp + ) { + return new Response( + JSON.stringify({ + error: "No integration account with mcp config", + }), + { + status: 400, + headers: { "Content-Type": "application/json" }, + }, + ); + } + + // Proxy the request to the serverUrl + return await proxyRequest( + request, + url, + integrationConfig.mcp.tokens.access_token, ); - } + } else { + if (!integrationAccount) { + return new Response( + JSON.stringify({ + error: "No integration account found", + }), + { + status: 400, + headers: { "Content-Type": "application/json" }, + }, + ); + } - // Proxy the request to the serverUrl - return await proxyRequest( - request, - serverUrl, - integrationConfig.mcp.tokens.access_token, - ); + // Configure environment variables using the utility function + const { env, args } = configureStdioMCPEnvironment( + spec, + integrationAccount, + ); + + // Get session_id from headers (case-insensitive), or generate a new uuid if not present + const sessionId = + request.headers.get("mcp-session-id") || + request.headers.get("Mcp-Session-Id") || + randomUUID(); + + // Use the saved local file instead of command + const executablePath = `./integrations/${slug}/main`; + + return createMCPStdioProxy(request, executablePath, args, { + env, + sessionId, + }); + } } catch (error: any) { - console.error("MCP Proxy Error:", error); return new Response(JSON.stringify({ error: error.message }), { status: 500, headers: { "Content-Type": "application/json" }, diff --git a/apps/webapp/app/routes/api.v1.oauth.callback.mcp.tsx b/apps/webapp/app/routes/api.v1.oauth.callback.mcp.tsx index 8859d81..ce0dcca 100644 --- 
a/apps/webapp/app/routes/api.v1.oauth.callback.mcp.tsx
+++ b/apps/webapp/app/routes/api.v1.oauth.callback.mcp.tsx
@@ -53,14 +53,14 @@ export async function loader({ request }: LoaderFunctionArgs) {
const spec = integrationDefinition.spec as any;
- if (!spec.mcpAuth) {
+ if (!spec.mcp) {
throw new Error("MCP auth configuration not found for this integration");
}
- const { transportStrategy, serverUrl } = spec.mcpAuth;
+ const { transportStrategy, url } = spec.mcp;
const authClient = createMCPAuthClient({
- serverUrl,
+ serverUrl: url,
transportStrategy: transportStrategy || "sse-first",
redirectUrl: MCP_CALLBACK_URL,
});
diff --git a/apps/webapp/app/routes/home.integration.$slug.tsx b/apps/webapp/app/routes/home.integration.$slug.tsx
index 2748d33..fc1e453 100644
--- a/apps/webapp/app/routes/home.integration.$slug.tsx
+++ b/apps/webapp/app/routes/home.integration.$slug.tsx
@@ -137,7 +137,9 @@ export default function IntegrationDetail() {
);
const hasApiKey = !!specData?.auth?.api_key;
const hasOAuth2 = !!specData?.auth?.OAuth2;
- const hasMCPAuth = !!specData?.mcpAuth;
+ const hasMCPAuth = !!(
+ specData?.mcp?.type === "http" && specData?.mcp?.needsAuth
+ );
const Component = getIcon(integration.icon as IconType);
return (
diff --git a/apps/webapp/app/routes/home.logs.activity.tsx b/apps/webapp/app/routes/home.logs.activity.tsx
index c2e17e8..e780b37 100644
--- a/apps/webapp/app/routes/home.logs.activity.tsx
+++ b/apps/webapp/app/routes/home.logs.activity.tsx
@@ -75,9 +75,9 @@ export default function LogsActivity() {
)}
{/* Logs List */}
-
+
{logs.length === 0 ? ( - +
diff --git a/apps/webapp/app/routes/home.logs.all.tsx b/apps/webapp/app/routes/home.logs.all.tsx index 2074ef9..8ab7d83 100644 --- a/apps/webapp/app/routes/home.logs.all.tsx +++ b/apps/webapp/app/routes/home.logs.all.tsx @@ -45,7 +45,7 @@ export default function LogsAll() { }, ]} /> -
+
{isInitialLoad ? ( <> {" "} @@ -64,10 +64,10 @@ export default function LogsAll() { /> )} {/* Logs List */} -
+
{logs.length === 0 ? ( - - + +

diff --git a/apps/webapp/app/routes/login.magic.tsx b/apps/webapp/app/routes/login.magic.tsx index 1c836c0..f54ca44 100644 --- a/apps/webapp/app/routes/login.magic.tsx +++ b/apps/webapp/app/routes/login.magic.tsx @@ -158,7 +158,8 @@ export default function LoginMagicLinkPage() {
} + confirmButton={
diff --git a/apps/webapp/app/routes/oauth.authorize.tsx b/apps/webapp/app/routes/oauth.authorize.tsx index 4a95168..cb03a99 100644 --- a/apps/webapp/app/routes/oauth.authorize.tsx +++ b/apps/webapp/app/routes/oauth.authorize.tsx @@ -199,13 +199,13 @@ export default function OAuthAuthorize() {

Permissions

    - {params.scope?.split(",").map((scope, index, arr) => { + {params.scope?.split(" ").map((scope, index, arr) => { const isFirst = index === 0; const isLast = index === arr.length - 1; return (
  • {getIcon(scope)}
    diff --git a/apps/webapp/app/services/integrationAccount.server.ts b/apps/webapp/app/services/integrationAccount.server.ts index d07e1b8..fb1d3ec 100644 --- a/apps/webapp/app/services/integrationAccount.server.ts +++ b/apps/webapp/app/services/integrationAccount.server.ts @@ -10,6 +10,9 @@ export const getIntegrationAccount = async ( integratedById: userId, isActive: true, }, + include: { + integrationDefinition: true, + }, }); }; diff --git a/apps/webapp/app/services/oauth/oauth.server.ts b/apps/webapp/app/services/oauth/oauth.server.ts index 7ced15b..0980729 100644 --- a/apps/webapp/app/services/oauth/oauth.server.ts +++ b/apps/webapp/app/services/oauth/oauth.server.ts @@ -278,14 +278,14 @@ export async function getRedirectURLForMCP( const spec = integrationDefinition.spec as any; - if (!spec.mcpAuth) { + if (!spec.mcp) { throw new Error("MCP auth configuration not found for this integration"); } - const { serverUrl, transportStrategy } = spec.mcpAuth; + const { url, transportStrategy } = spec.mcp; const authClient = createMCPAuthClient({ - serverUrl, + serverUrl: url, transportStrategy: transportStrategy || "sse-first", redirectUrl: MCP_CALLBACK_URL, }); diff --git a/apps/webapp/app/trigger/utils/mcp.ts b/apps/webapp/app/trigger/utils/mcp.ts index de0441c..a51db18 100644 --- a/apps/webapp/app/trigger/utils/mcp.ts +++ b/apps/webapp/app/trigger/utils/mcp.ts @@ -1,9 +1,69 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ import { logger } from "@trigger.dev/sdk/v3"; import { jsonSchema, tool, type ToolSet } from "ai"; +import * as fs from "fs"; +import * as path from "path"; import { type MCPTool } from "./types"; import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js"; +import { prisma } from "~/db.server"; + +export const configureStdioMCPEnvironment = ( + spec: any, + account: any, +): { env: Record; args: any[] } => { + if (!spec.mcp) { + return { env: {}, args: [] }; + } + + const mcpSpec = spec.mcp; + const configuredMCP = { ...mcpSpec }; + + // Replace config placeholders in environment variables + if (configuredMCP.env) { + for (const [key, value] of Object.entries(configuredMCP.env)) { + if (typeof value === "string" && value.includes("${config:")) { + // Extract the config key from the placeholder + const configKey = value.match(/\$\{config:(.*?)\}/)?.[1]; + if ( + configKey && + account.integrationConfiguration && + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (account.integrationConfiguration as any)[configKey] + ) { + configuredMCP.env[key] = value.replace( + `\${config:${configKey}}`, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (account.integrationConfiguration as any)[configKey], + ); + } + } + + if (typeof value === "string" && value.includes("${integrationConfig:")) { + // Extract the config key from the placeholder + const configKey = value.match(/\$\{integrationConfig:(.*?)\}/)?.[1]; + if ( + configKey && + account.integrationDefinition.config && + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (account.integrationDefinition.config as any)[configKey] + ) { + configuredMCP.env[key] = value.replace( + `\${integrationConfig:${configKey}}`, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (account.integrationDefinition.config as any)[configKey], + ); + } + } + } + } + + return { + env: configuredMCP.env || {}, + args: Array.isArray(configuredMCP.args) ? 
configuredMCP.args : [], + }; +}; + export class MCP { private Client: any; private clients: Record = {}; @@ -133,3 +193,135 @@ export class MCP { } } } + +export const getIntegrationStdioFile = async ( + integrationDefinitionSlug: string, +) => { + // If the file is in public/integrations/[slug]/main, it is served at /integrations/[slug]/main + return `/integrations/${integrationDefinitionSlug}/main`; +}; + +export const fetchAndSaveStdioIntegrations = async () => { + try { + logger.info("Starting stdio integrations fetch and save process"); + + // Get all integration definitions + const integrationDefinitions = + await prisma.integrationDefinitionV2.findMany({ + where: { + deleted: null, // Only active integrations + }, + }); + + logger.info( + `Found ${integrationDefinitions.length} integration definitions`, + ); + + for (const integration of integrationDefinitions) { + try { + const spec = integration.spec as any; + + // Check if this integration has MCP config and is stdio type + if (spec?.mcp?.type === "stdio" && spec?.mcp?.url) { + logger.info(`Processing stdio integration: ${integration.slug}`); + + const integrationDir = path.join( + process.cwd(), + "integrations", + integration.slug, + ); + const targetFile = path.join(integrationDir, "main"); + + // Create directory if it doesn't exist + if (!fs.existsSync(integrationDir)) { + fs.mkdirSync(integrationDir, { recursive: true }); + logger.info(`Created directory: ${integrationDir}`); + } + + // Skip if file already exists + if (fs.existsSync(targetFile)) { + logger.info( + `Integration ${integration.slug} already exists, skipping`, + ); + continue; + } + + const urlOrPath = spec.mcp.url; + + // If urlOrPath looks like a URL, use fetch, otherwise treat as local path + let isUrl = false; + try { + // Try to parse as URL + const parsed = new URL(urlOrPath); + isUrl = ["http:", "https:"].includes(parsed.protocol); + } catch { + isUrl = false; + } + + if (isUrl) { + // Fetch the URL content + logger.info(`Fetching content from URL: ${urlOrPath}`); + const response = await fetch(urlOrPath); + + if (!response.ok) { + logger.error( + `Failed to fetch ${urlOrPath}: ${response.status} ${response.statusText}`, + ); + continue; + } + + const content = await response.text(); + + // Save the content to the target file + fs.writeFileSync(targetFile, content); + + // Make the file executable if it's a script + if (process.platform !== "win32") { + fs.chmodSync(targetFile, "755"); + } + + logger.info( + `Successfully saved stdio integration: ${integration.slug} to ${targetFile}`, + ); + } else { + // Treat as local file path + const sourcePath = path.isAbsolute(urlOrPath) + ? 
urlOrPath + : path.join(process.cwd(), urlOrPath); + + logger.info(`Copying content from local path: ${sourcePath}`); + + if (!fs.existsSync(sourcePath)) { + logger.error(`Source file does not exist: ${sourcePath}`); + continue; + } + + fs.copyFileSync(sourcePath, targetFile); + + // Make the file executable if it's a script + if (process.platform !== "win32") { + fs.chmodSync(targetFile, "755"); + } + + logger.info( + `Successfully copied stdio integration: ${integration.slug} to ${targetFile}`, + ); + } + } else { + logger.debug( + `Skipping integration ${integration.slug}: not a stdio type or missing URL`, + ); + } + } catch (error) { + logger.error(`Error processing integration ${integration.slug}:`, { + error, + }); + } + } + + logger.info("Completed stdio integrations fetch and save process"); + } catch (error) { + logger.error("Failed to fetch and save stdio integrations:", { error }); + throw error; + } +}; diff --git a/apps/webapp/app/utils/oauth2-middleware.ts b/apps/webapp/app/utils/oauth2-middleware.ts index 2131262..29c4c87 100644 --- a/apps/webapp/app/utils/oauth2-middleware.ts +++ b/apps/webapp/app/utils/oauth2-middleware.ts @@ -24,11 +24,14 @@ export interface OAuth2Context { export async function requireOAuth2(request: Request): Promise { const authHeader = request.headers.get("authorization"); - + if (!authHeader || !authHeader.startsWith("Bearer ")) { throw json( - { error: "invalid_token", error_description: "Missing or invalid authorization header" }, - { status: 401 } + { + error: "invalid_token", + error_description: "Missing or invalid authorization header", + }, + { status: 401 }, ); } @@ -36,7 +39,7 @@ export async function requireOAuth2(request: Request): Promise { try { const accessToken = await oauth2Service.validateAccessToken(token); - + return { user: { id: accessToken.user.id, @@ -59,13 +62,18 @@ export async function requireOAuth2(request: Request): Promise { }; } catch (error) { throw json( - { error: "invalid_token", error_description: "Invalid or expired access token" }, - { status: 401 } + { + error: "invalid_token", + error_description: "Invalid or expired access token", + }, + { status: 401 }, ); } } -export async function getOAuth2Context(request: Request): Promise { +export async function getOAuth2Context( + request: Request, +): Promise { try { return await requireOAuth2(request); } catch (error) { @@ -73,20 +81,31 @@ export async function getOAuth2Context(request: Request): Promise console.log(`Express server listening at http://localhost:${port}`), diff --git a/apps/webapp/trigger.config.ts b/apps/webapp/trigger.config.ts index 14bf8f6..3fffe82 100644 --- a/apps/webapp/trigger.config.ts +++ b/apps/webapp/trigger.config.ts @@ -27,8 +27,16 @@ export default defineConfig({ build: { extensions: [ syncEnvVars(() => ({ - DATABASE_URL: process.env.DATABASE_URL as string, + ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY as string, API_BASE_URL: process.env.API_BASE_URL as string, + DATABASE_URL: process.env.DATABASE_URL as string, + EMBEDDING_MODEL: process.env.EMBEDDING_MODEL as string, + ENCRYPTION_KEY: process.env.ENCRYPTION_KEY as string, + MODEL: process.env.MODEL ?? 
"gpt-4.1-2025-04-14", + NEO4J_PASSWORD: process.env.NEO4J_PASSWORD as string, + NEO4J_URI: process.env.NEO4J_URI as string, + NEO4J_USERNAME: process.env.NEO4J_USERNAME as string, + OPENAI_API_KEY: process.env.OPENAI_API_KEY as string, })), prismaExtension({ schema: "prisma/schema.prisma", diff --git a/apps/webapp/tsconfig.json b/apps/webapp/tsconfig.json index 8adba20..630a90c 100644 --- a/apps/webapp/tsconfig.json +++ b/apps/webapp/tsconfig.json @@ -7,7 +7,8 @@ "**/*.tsx", "tailwind.config.js", "tailwind.config.js", - "trigger.config.ts" + "trigger.config.ts", + "server.mjs" ], "compilerOptions": { "types": ["@remix-run/node", "vite/client"], diff --git a/docker-compose.yaml b/docker-compose.yaml index ee17774..628b027 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -40,7 +40,7 @@ services: postgres: container_name: core-postgres - image: redplanethq/postgres:0.1.0 + image: redplanethq/postgres:0.1.2 environment: - POSTGRES_USER=${POSTGRES_USER} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} diff --git a/integrations/linear/spec.json b/integrations/linear/spec.json index 9dd6b86..fa70852 100644 --- a/integrations/linear/spec.json +++ b/integrations/linear/spec.json @@ -12,9 +12,9 @@ "label": "Linear API Key" } }, - "mcpAuth": { - "serverUrl": "https://mcp.linear.app/sse", - "transportStrategy": "sse-first", - "needsSeparateAuth": true + "mcp": { + "type": "http", + "url": "https://mcp.linear.app/mcp", + "needsAuth": true } } \ No newline at end of file diff --git a/integrations/slack/spec.json b/integrations/slack/spec.json index 34a46c3..ccf64b5 100644 --- a/integrations/slack/spec.json +++ b/integrations/slack/spec.json @@ -4,12 +4,11 @@ "description": "Connect your workspace to Slack. Run your workflows from slack bookmarks", "icon": "slack", "mcp": { - "command": "npx", - "args": ["-y", "@modelcontextprotocol/server-slack"], + "type": "stdio", + "url": "", + "args": [ ], "env": { - "SLACK_BOT_TOKEN": "${config:access_token}", - "SLACK_TEAM_ID": "${config:team_id}", - "SLACK_CHANNEL_IDS": "${config:channel_ids}" + "SLACK_MCP_XOXP_TOKEN": "${config:access_token}" } }, "auth": { diff --git a/packages/core-cli/package.json b/packages/core-cli/package.json index f2c905e..5c9628c 100644 --- a/packages/core-cli/package.json +++ b/packages/core-cli/package.json @@ -1,6 +1,6 @@ { "name": "@redplanethq/core", - "version": "0.1.1", + "version": "0.1.3", "description": "A Command-Line Interface for Core", "type": "module", "license": "MIT", @@ -89,6 +89,7 @@ "commander": "^9.4.1", "defu": "^6.1.4", "dotenv": "^16.4.5", + "dotenv-expand": "^12.0.2", "esbuild": "^0.23.0", "eventsource": "^3.0.2", "evt": "^2.4.13", diff --git a/packages/core-cli/src/commands/init.ts b/packages/core-cli/src/commands/init.ts index 87b34e8..abf2125 100644 --- a/packages/core-cli/src/commands/init.ts +++ b/packages/core-cli/src/commands/init.ts @@ -1,10 +1,13 @@ import { intro, outro, text, confirm, spinner, note, log } from "@clack/prompts"; import { fileExists, updateEnvFile } from "../utils/file.js"; import { checkPostgresHealth } from "../utils/docker.js"; -import { executeDockerCommandInteractive } from "../utils/docker-interactive.js"; +import { executeCommandInteractive } from "../utils/docker-interactive.js"; import { printCoreBrainLogo } from "../utils/ascii.js"; import { setupEnvFile } from "../utils/env.js"; import { hasTriggerConfig } from "../utils/env-checker.js"; +import { getDockerCompatibleEnvVars } from "../utils/env-docker.js"; +import { handleDockerLogin } from "../utils/docker-login.js"; 
+import { deployTriggerTasks } from "../utils/trigger-deploy.js"; import path from "path"; export async function initCommand() { @@ -14,20 +17,29 @@ export async function initCommand() { intro("πŸš€ Core Development Environment Setup"); // Step 1: Confirm this is the Core repository - note("Please ensure you have:\nβ€’ Docker and Docker Compose installed\nβ€’ Git installed\nβ€’ pnpm package manager installed\nβ€’ You are in the Core repository directory", "πŸ“‹ Prerequisites"); - + note( + "Please ensure you have:\nβ€’ Docker and Docker Compose installed\nβ€’ Git installed\nβ€’ pnpm package manager installed\nβ€’ You are in the Core repository directory", + "πŸ“‹ Prerequisites" + ); + const isCoreRepo = await confirm({ message: "Are you currently in the Core repository directory?", }); if (!isCoreRepo) { - note("Please clone the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run 'core init' again.", "πŸ“₯ Clone Repository"); + note( + "Please clone the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run 'core init' again.", + "πŸ“₯ Clone Repository" + ); outro("❌ Setup cancelled. Please navigate to the Core repository first."); process.exit(1); } const rootDir = process.cwd(); const triggerDir = path.join(rootDir, "trigger"); + const webappDir = path.join(rootDir, "apps", "webapp"); + const databaseDir = path.join(rootDir, "packages", "database"); + const typesDir = path.join(rootDir, "packages", "types"); try { // Step 2: Setup .env file in root @@ -51,7 +63,7 @@ export async function initCommand() { // Step 3: Docker compose up -d in root try { - await executeDockerCommandInteractive("docker compose up -d", { + await executeCommandInteractive("docker compose up -d", { cwd: rootDir, message: "Starting Docker containers in root...", showOutput: true, @@ -103,7 +115,7 @@ export async function initCommand() { // Step 6: Docker compose up for trigger try { - await executeDockerCommandInteractive("docker compose up -d", { + await executeCommandInteractive("docker compose up -d", { cwd: triggerDir, message: "Starting Trigger.dev containers...", showOutput: true, @@ -175,6 +187,16 @@ export async function initCommand() { }, }); + const openaiApiKey = await text({ + message: "Enter your OpenAI API Key:", + validate: (value) => { + if (!value || value.length === 0) { + return "OpenAI API Key is required"; + } + return; + }, + }); + // Step 11: Update .env with project details const s6 = spinner(); s6.start("Updating .env with Trigger.dev configuration..."); @@ -182,6 +204,7 @@ export async function initCommand() { try { await updateEnvFile(envPath, "TRIGGER_PROJECT_ID", projectId as string); await updateEnvFile(envPath, "TRIGGER_SECRET_KEY", secretKey as string); + await updateEnvFile(envPath, "OPENAI_API_KEY", openaiApiKey as string); s6.stop("βœ… Updated .env with Trigger.dev configuration"); } catch (error: any) { s6.stop("❌ Failed to update .env file"); @@ -190,13 +213,13 @@ export async function initCommand() { // Step 12: Restart root docker-compose with new configuration try { - await executeDockerCommandInteractive("docker compose down", { + await executeCommandInteractive("docker compose down", { cwd: rootDir, message: "Stopping Core services...", showOutput: true, }); - await executeDockerCommandInteractive("docker compose up -d", { + await executeCommandInteractive("docker compose up -d", { cwd: rootDir, message: "Starting Core services with new Trigger.dev configuration...", showOutput: true, @@ 
-206,73 +229,12 @@ export async function initCommand() { } } - // Step 13: Show docker login instructions + // Step 13: Handle Docker login note("Run the following command to login to Docker registry:", "🐳 Docker Registry Login"); - - try { - // Read env file to get docker registry details - const envContent = await import("fs").then((fs) => - fs.promises.readFile(triggerEnvPath, "utf8") - ); - const envLines = envContent.split("\n"); - - const getEnvValue = (key: string) => { - const line = envLines.find((l) => l.startsWith(`${key}=`)); - return line ? line.split("=")[1] : ""; - }; - - const dockerRegistryUrl = getEnvValue("DOCKER_REGISTRY_URL"); - const dockerRegistryUsername = getEnvValue("DOCKER_REGISTRY_USERNAME"); - const dockerRegistryPassword = getEnvValue("DOCKER_REGISTRY_PASSWORD"); - - log.info( - `docker login ${dockerRegistryUrl} -u ${dockerRegistryUsername} -p ${dockerRegistryPassword}` - ); - } catch (error) { - log.info("docker login -u -p "); - } - - const dockerLoginConfirmed = await confirm({ - message: "Have you completed the Docker login successfully?", - }); - - if (!dockerLoginConfirmed) { - outro("❌ Setup cancelled. Please complete Docker login first and run the command again."); - process.exit(1); - } + await handleDockerLogin(triggerEnvPath); // Step 14: Deploy Trigger.dev tasks - note( - "We'll now deploy the trigger tasks to your Trigger.dev instance.", - "πŸš€ Deploying Trigger.dev tasks" - ); - - try { - // Login to trigger.dev CLI - await executeDockerCommandInteractive( - "npx -y trigger.dev@v4-beta login -a http://localhost:8030", - { - cwd: rootDir, - message: "Logging in to Trigger.dev CLI...", - showOutput: true, - } - ); - - // Deploy trigger tasks - await executeDockerCommandInteractive("pnpm trigger:deploy", { - cwd: rootDir, - message: "Deploying Trigger.dev tasks...", - showOutput: true, - }); - - log.success("Trigger.dev tasks deployed successfully!"); - } catch (error: any) { - log.warning("Failed to deploy Trigger.dev tasks:"); - note( - `${error.message}\n\nYou can deploy them manually later with:\n1. npx trigger.dev@v4-beta login -a http://localhost:8030\n2. 
pnpm trigger:deploy`, - "Manual Deployment" - ); - } + await deployTriggerTasks(rootDir); // Step 15: Final instructions outro("πŸŽ‰ Setup Complete!"); diff --git a/packages/core-cli/src/commands/start.ts b/packages/core-cli/src/commands/start.ts index 2d061ae..00cbbef 100644 --- a/packages/core-cli/src/commands/start.ts +++ b/packages/core-cli/src/commands/start.ts @@ -1,50 +1,55 @@ -import { intro, outro, note, log, confirm } from '@clack/prompts'; -import { executeDockerCommandInteractive } from '../utils/docker-interactive.js'; -import { printCoreBrainLogo } from '../utils/ascii.js'; -import path from 'path'; +import { intro, outro, note, log, confirm } from "@clack/prompts"; +import { executeCommandInteractive } from "../utils/docker-interactive.js"; +import { printCoreBrainLogo } from "../utils/ascii.js"; +import path from "path"; export async function startCommand() { // Display the CORE brain logo printCoreBrainLogo(); - - intro('πŸš€ Starting Core Development Environment'); + + intro("πŸš€ Starting Core Development Environment"); // Step 1: Confirm this is the Core repository const isCoreRepo = await confirm({ - message: 'Are you currently in the Core repository directory?', + message: "Are you currently in the Core repository directory?", }); if (!isCoreRepo) { - note('Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core start" again.', 'πŸ“₯ Core Repository Required'); - outro('❌ Please navigate to the Core repository first.'); + note( + 'Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core start" again.', + "πŸ“₯ Core Repository Required" + ); + outro("❌ Please navigate to the Core repository first."); process.exit(1); } const rootDir = process.cwd(); - const triggerDir = path.join(rootDir, 'trigger'); + const triggerDir = path.join(rootDir, "trigger"); try { // Start main services - await executeDockerCommandInteractive('docker compose up -d', { + await executeCommandInteractive("docker compose up -d", { cwd: rootDir, - message: 'Starting Core services...', - showOutput: true + message: "Starting Core services...", + showOutput: true, }); // Start trigger services - await executeDockerCommandInteractive('docker compose up -d', { + await executeCommandInteractive("docker compose up -d", { cwd: triggerDir, - message: 'Starting Trigger.dev services...', - showOutput: true + message: "Starting Trigger.dev services...", + showOutput: true, }); // Final success message - outro('πŸŽ‰ Core Development Environment Started!'); - note('β€’ Core Application: http://localhost:3033\nβ€’ Trigger.dev: http://localhost:8030\nβ€’ PostgreSQL: localhost:5432', '🌐 Your services are now running'); - log.success('Happy coding!'); - + outro("πŸŽ‰ Core Development Environment Started!"); + note( + "β€’ Core Application: http://localhost:3033\nβ€’ Trigger.dev: http://localhost:8030\nβ€’ PostgreSQL: localhost:5432", + "🌐 Your services are now running" + ); + log.success("Happy coding!"); } catch (error: any) { outro(`❌ Failed to start services: ${error.message}`); process.exit(1); } -} \ No newline at end of file +} diff --git a/packages/core-cli/src/commands/stop.ts b/packages/core-cli/src/commands/stop.ts index aa1b2a6..111222b 100644 --- a/packages/core-cli/src/commands/stop.ts +++ b/packages/core-cli/src/commands/stop.ts @@ -1,50 +1,52 @@ -import { intro, outro, log, confirm, note } from '@clack/prompts'; -import { executeDockerCommandInteractive } from 
'../utils/docker-interactive.js'; -import { printCoreBrainLogo } from '../utils/ascii.js'; -import path from 'path'; +import { intro, outro, log, confirm, note } from "@clack/prompts"; +import { executeCommandInteractive } from "../utils/docker-interactive.js"; +import { printCoreBrainLogo } from "../utils/ascii.js"; +import path from "path"; export async function stopCommand() { // Display the CORE brain logo printCoreBrainLogo(); - - intro('πŸ›‘ Stopping Core Development Environment'); + + intro("πŸ›‘ Stopping Core Development Environment"); // Step 1: Confirm this is the Core repository const isCoreRepo = await confirm({ - message: 'Are you currently in the Core repository directory?', + message: "Are you currently in the Core repository directory?", }); if (!isCoreRepo) { - note('Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core stop" again.', 'πŸ“₯ Core Repository Required'); - outro('❌ Please navigate to the Core repository first.'); + note( + 'Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core stop" again.', + "πŸ“₯ Core Repository Required" + ); + outro("❌ Please navigate to the Core repository first."); process.exit(1); } const rootDir = process.cwd(); - const triggerDir = path.join(rootDir, 'trigger'); + const triggerDir = path.join(rootDir, "trigger"); try { // Stop trigger services first - await executeDockerCommandInteractive('docker compose down', { + await executeCommandInteractive("docker compose down", { cwd: triggerDir, - message: 'Stopping Trigger.dev services...', - showOutput: true + message: "Stopping Trigger.dev services...", + showOutput: true, }); // Stop main services - await executeDockerCommandInteractive('docker compose down', { + await executeCommandInteractive("docker compose down", { cwd: rootDir, - message: 'Stopping Core services...', - showOutput: true + message: "Stopping Core services...", + showOutput: true, }); // Final success message - outro('πŸŽ‰ Core Development Environment Stopped!'); - log.success('All services have been stopped.'); + outro("πŸŽ‰ Core Development Environment Stopped!"); + log.success("All services have been stopped."); log.info('Run "core start" to start services again.'); - } catch (error: any) { outro(`❌ Failed to stop services: ${error.message}`); process.exit(1); } -} \ No newline at end of file +} diff --git a/packages/core-cli/src/utils/docker-interactive.ts b/packages/core-cli/src/utils/docker-interactive.ts index e589561..2525026 100644 --- a/packages/core-cli/src/utils/docker-interactive.ts +++ b/packages/core-cli/src/utils/docker-interactive.ts @@ -1,46 +1,45 @@ -import { spawn, ChildProcess } from 'child_process'; -import { spinner } from '@clack/prompts'; +import { spawn, ChildProcess } from "child_process"; +import { spinner } from "@clack/prompts"; -export interface DockerCommandOptions { +export interface CommandOptions { cwd: string; message: string; showOutput?: boolean; + env?: Record; } -export function executeDockerCommandInteractive( - command: string, - options: DockerCommandOptions -): Promise { +export function executeCommandInteractive(command: string, options: CommandOptions): Promise { return new Promise((resolve, reject) => { const s = spinner(); s.start(options.message); // Split command into parts - const parts = command.split(' '); + const parts = command.split(" "); const cmd = parts[0]; const args = parts.slice(1); - + if (!cmd) { - reject(new 
Error('Invalid command')); + reject(new Error("Invalid command")); return; } const child: ChildProcess = spawn(cmd, args, { cwd: options.cwd, - stdio: options.showOutput ? ['ignore', 'pipe', 'pipe'] : 'ignore', - detached: false + stdio: options.showOutput ? ["ignore", "pipe", "pipe"] : "ignore", + detached: false, + env: options.env ? { ...process.env, ...options.env } : { ...process.env }, }); - let output = ''; + let output = ""; // Handle stdout if (child.stdout && options.showOutput) { - child.stdout.on('data', (data: Buffer) => { + child.stdout.on("data", (data: Buffer) => { const text = data.toString(); output += text; - + // Update spinner with latest output line - const lines = text.trim().split('\n'); + const lines = text.trim().split("\n"); const lastLine = lines[lines.length - 1]; if (lastLine && lastLine.trim()) { s.message(`${options.message}\n${lastLine.trim()}`); @@ -50,12 +49,13 @@ export function executeDockerCommandInteractive( // Handle stderr if (child.stderr && options.showOutput) { - child.stderr.on('data', (data: Buffer) => { + child.stderr.on("data", (data: Buffer) => { const text = data.toString(); output += text; - + // console.log(text); + // Update spinner with error output - const lines = text.trim().split('\n'); + const lines = text.trim().split("\n"); const lastLine = lines[lines.length - 1]; if (lastLine && lastLine.trim()) { s.message(`${options.message}\n❌ ${lastLine.trim()}`); @@ -64,14 +64,14 @@ export function executeDockerCommandInteractive( } // Handle process exit - child.on('exit', (code: number | null) => { + child.on("exit", (code: number | null) => { if (code === 0) { - s.stop(`βœ… ${options.message.replace(/\.\.\.$/, '')} completed`); + s.stop(`βœ… ${options.message.replace(/\.\.\.$/, "")} completed`); resolve(); } else { - s.stop(`❌ ${options.message.replace(/\.\.\.$/, '')} failed (exit code: ${code})`); + s.stop(`❌ ${options.message.replace(/\.\.\.$/, "")} failed (exit code: ${code})`); if (options.showOutput && output) { - console.log('\nOutput:'); + console.log("\nOutput:"); console.log(output); } reject(new Error(`Command failed with exit code ${code}`)); @@ -79,30 +79,30 @@ export function executeDockerCommandInteractive( }); // Handle errors - child.on('error', (error: Error) => { - s.stop(`❌ ${options.message.replace(/\.\.\.$/, '')} failed`); + child.on("error", (error: Error) => { + s.stop(`❌ ${options.message.replace(/\.\.\.$/, "")} failed`); reject(error); }); // Handle Ctrl+C const handleSigint = () => { - s.stop(`⏹️ ${options.message.replace(/\.\.\.$/, '')} interrupted`); - child.kill('SIGTERM'); - + s.stop(`⏹️ ${options.message.replace(/\.\.\.$/, "")} interrupted`); + child.kill("SIGTERM"); + // Give the process time to clean up setTimeout(() => { if (child.killed === false) { - child.kill('SIGKILL'); + child.kill("SIGKILL"); } process.exit(130); // Standard exit code for SIGINT }, 5000); }; - process.on('SIGINT', handleSigint); + process.on("SIGINT", handleSigint); // Clean up event listener when done - child.on('exit', () => { - process.off('SIGINT', handleSigint); + child.on("exit", () => { + process.off("SIGINT", handleSigint); }); }); -} \ No newline at end of file +} diff --git a/packages/core-cli/src/utils/docker-login.ts b/packages/core-cli/src/utils/docker-login.ts new file mode 100644 index 0000000..84af43c --- /dev/null +++ b/packages/core-cli/src/utils/docker-login.ts @@ -0,0 +1,63 @@ +import { confirm, log } from "@clack/prompts"; +import path from "path"; +import os from "os"; +import fs from "fs"; + +export async 
function handleDockerLogin(triggerEnvPath: string): Promise<void> {
+ // Check if Docker is already logged in to localhost:5000
+ let dockerLoginNeeded = true;
+ try {
+ const dockerConfigPath = process.env.DOCKER_CONFIG
+ ? path.join(process.env.DOCKER_CONFIG, "config.json")
+ : path.join(os.homedir(), ".docker", "config.json");
+
+ if (fs.existsSync(dockerConfigPath)) {
+ const configContent = await fs.promises.readFile(dockerConfigPath, "utf8");
+ const config = JSON.parse(configContent);
+ if (
+ config &&
+ config.auths &&
+ Object.prototype.hasOwnProperty.call(config.auths, "localhost:5000")
+ ) {
+ dockerLoginNeeded = false;
+ }
+ }
+ } catch (error) {
+ // Ignore errors, will prompt for login below
+ }
+
+ if (dockerLoginNeeded) {
+ try {
+ // Read env file to get docker registry details
+ const envContent = await fs.promises.readFile(triggerEnvPath, "utf8");
+ const envLines = envContent.split("\n");
+
+ const getEnvValue = (key: string) => {
+ const line = envLines.find((l) => l.startsWith(`${key}=`));
+ return line ? line.split("=")[1] : "";
+ };
+
+ const dockerRegistryUrl = getEnvValue("DOCKER_REGISTRY_URL");
+ const dockerRegistryUsername = getEnvValue("DOCKER_REGISTRY_USERNAME");
+ const dockerRegistryPassword = getEnvValue("DOCKER_REGISTRY_PASSWORD");
+
+ log.info(
+ `docker login -u ${dockerRegistryUsername} -p ${dockerRegistryPassword} ${dockerRegistryUrl} `
+ );
+ } catch (error) {
+ log.info("docker login <registry-url> -u <username> -p <password>");
+ }
+ } else {
+ log.info("βœ… Docker is already logged in to localhost:5000, skipping login prompt.");
+ }
+
+ const dockerLoginConfirmed = await confirm({
+ message: "Have you completed the Docker login successfully?",
+ });
+
+ if (!dockerLoginConfirmed) {
+ throw new Error(
+ "Docker login required. Please complete Docker login first and run the command again."
+ );
+ }
+}
diff --git a/packages/core-cli/src/utils/env-docker.ts b/packages/core-cli/src/utils/env-docker.ts
new file mode 100644
index 0000000..06f767d
--- /dev/null
+++ b/packages/core-cli/src/utils/env-docker.ts
@@ -0,0 +1,48 @@
+import path from "path";
+import fs from "fs";
+import dotenv from "dotenv";
+import dotenvExpand from "dotenv-expand";
+
+/**
+ * Reads environment variables from .env file and replaces localhost URLs with host.docker.internal
+ * for Docker container compatibility
+ */
+export async function getDockerCompatibleEnvVars(rootDir: string): Promise<Record<string, string>> {
+ const envPath = path.join(rootDir, ".env");
+
+ try {
+ // Use dotenv to parse and expand variables
+
+ const envVarsExpand =
+ dotenvExpand.expand(dotenv.config({ path: envPath, processEnv: {} })).parsed || {};
+
+ const getEnvValue = (key: string): string => {
+ return envVarsExpand[key] || "";
+ };
+
+ const replaceLocalhostWithDockerHost = (value: string): string => {
+ return value
+ .replace(/localhost/g, "host.docker.internal")
+ .replace(/127\.0\.0\.1/g, "host.docker.internal");
+ };
+
+ // Get all required environment variables
+ const envVars = {
+ ANTHROPIC_API_KEY: getEnvValue("ANTHROPIC_API_KEY"),
+ API_BASE_URL: replaceLocalhostWithDockerHost(getEnvValue("API_BASE_URL")),
+ DATABASE_URL: replaceLocalhostWithDockerHost(getEnvValue("DATABASE_URL")),
+ EMBEDDING_MODEL: getEnvValue("EMBEDDING_MODEL"),
+ ENCRYPTION_KEY: getEnvValue("ENCRYPTION_KEY"),
+ MODEL: getEnvValue("MODEL") || "gpt-4.1-2025-04-14",
+ NEO4J_PASSWORD: getEnvValue("NEO4J_PASSWORD"),
+ NEO4J_URI: replaceLocalhostWithDockerHost(getEnvValue("NEO4J_URI")),
+ NEO4J_USERNAME: getEnvValue("NEO4J_USERNAME"),
+ OPENAI_API_KEY: getEnvValue("OPENAI_API_KEY"),
+ TRIGGER_PROJECT_ID: getEnvValue("TRIGGER_PROJECT_ID"),
+ };
+
+ return envVars;
+ } catch (error) {
+ throw new Error(`Failed to read .env file: ${error}`);
+ }
+}
diff --git a/packages/core-cli/src/utils/trigger-deploy.ts b/packages/core-cli/src/utils/trigger-deploy.ts
new file mode 100644
index 0000000..ed9dc2a
--- /dev/null
+++ b/packages/core-cli/src/utils/trigger-deploy.ts
@@ -0,0 +1,66 @@
+import { note, log } from "@clack/prompts";
+import { executeCommandInteractive } from "./docker-interactive.js";
+import { getDockerCompatibleEnvVars } from "./env-docker.js";
+import path from "path";
+
+export async function deployTriggerTasks(rootDir: string): Promise<void> {
+ const webappDir = path.join(rootDir, "apps", "webapp");
+ const databaseDir = path.join(rootDir, "packages", "database");
+ const typesDir = path.join(rootDir, "packages", "types");
+
+ note(
+ "We'll now deploy the trigger tasks to your Trigger.dev instance.",
+ "πŸš€ Deploying Trigger.dev tasks"
+ );
+
+ try {
+ // Login to trigger.dev CLI
+ await executeCommandInteractive(
+ "npx -y trigger.dev@4.0.0-v4-beta.22 login -a http://localhost:8030",
+ {
+ cwd: rootDir,
+ message: "Logging in to Trigger.dev CLI...",
+ showOutput: true,
+ }
+ );
+
+ await executeCommandInteractive("pnpm install", {
+ cwd: rootDir,
+ message: "Running package installation",
+ showOutput: true,
+ });
+
+ const envVars = await getDockerCompatibleEnvVars(rootDir);
+
+ await executeCommandInteractive("pnpm build", {
+ cwd: databaseDir,
+ message: "Building @core/database...",
+ showOutput: true,
+ env: {
+ DATABASE_URL: envVars.DATABASE_URL as string,
+ },
+ });
+
+ await executeCommandInteractive("pnpm build", {
+ cwd: typesDir,
+ message: "Building @core/types...",
+ showOutput: true,
+ });
+
+ // Deploy trigger tasks
+ await 
executeCommandInteractive("pnpm run trigger:deploy", { + cwd: webappDir, + message: "Deploying Trigger.dev tasks...", + showOutput: true, + env: envVars, + }); + + log.success("Trigger.dev tasks deployed successfully!"); + } catch (error: any) { + log.warning("Failed to deploy Trigger.dev tasks:"); + note( + `${error.message}\n\nYou can deploy them manually later with:\n1. npx trigger.dev@v4-beta login -a http://localhost:8030\n2. pnpm trigger:deploy`, + "Manual Deployment" + ); + } +} \ No newline at end of file diff --git a/packages/database/package.json b/packages/database/package.json index 10f3958..5dac57f 100644 --- a/packages/database/package.json +++ b/packages/database/package.json @@ -21,7 +21,7 @@ "db:studio": "prisma studio", "db:reset": "prisma migrate reset", "typecheck": "tsc --noEmit", - "build": "pnpm run clean && tsc --noEmit false --outDir dist --declaration", + "build": "pnpm run generate && pnpm run clean && tsc --noEmit false --outDir dist --declaration", "dev": "tsc --noEmit false --outDir dist --declaration --watch" } } \ No newline at end of file diff --git a/packages/mcp-proxy/src/core/mcp-remote-client.ts b/packages/mcp-proxy/src/core/mcp-remote-client.ts index 26f9975..ec093f6 100644 --- a/packages/mcp-proxy/src/core/mcp-remote-client.ts +++ b/packages/mcp-proxy/src/core/mcp-remote-client.ts @@ -1,6 +1,7 @@ import { Client } from "@modelcontextprotocol/sdk/client/index.js"; import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js"; import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js"; +import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js"; import { MCPRemoteClientConfig, AuthenticationResult, @@ -153,7 +154,7 @@ export function createMCPProxy( redirectUrl: string, transportStrategy: TransportStrategy = "sse-first", clientHeaders?: { sessionId?: string | null; lastEventId?: string | null } - ): Promise { + ): Promise { // Create auth provider with stored credentials using common factory const authProvider = await createAuthProviderForProxy(serverUrl, credentials, redirectUrl); @@ -173,9 +174,16 @@ export function createMCPProxy( } // Create transport based on strategy (don't start yet) - let transport: SSEClientTransport | StreamableHTTPClientTransport; + let transport: SSEClientTransport | StreamableHTTPClientTransport | StdioClientTransport; switch (transportStrategy) { + case "stdio": + // For stdio transport, serverUrl should contain the command to execute + // This is mainly for completeness - prefer using createMCPStdioProxy directly + throw new Error( + "Stdio transport not supported in createRemoteTransport. Use createMCPStdioProxy instead." + ); + case "sse-only": transport = new SSEClientTransport(url, { authProvider, @@ -227,6 +235,257 @@ export function createMCPProxy( } } +/** + * Creates an MCP proxy that forwards requests to a stdio process. + * Maintains a mapping of sessionId -> StdioClientTransport for reuse. + * If sessionId is provided, it is returned in the response header as mcp_session_id. 
+ * @param request The incoming HTTP request + * @param command The command to execute for the stdio process + * @param args Arguments for the command + * @param options Optional configuration for the proxy + * @param sessionId Optional session id for transport reuse + * @returns Promise that resolves to the HTTP response + */ +// Track both the transport and its last used timestamp +type StdioTransportEntry = { + transport: StdioClientTransport; + lastUsed: number; // ms since epoch +}; + +const stdioTransports: Map = new Map(); + +/** + * Cleans up any stdio transports that have not been used in the last 5 minutes. + * Closes and removes them from the map. + */ +function cleanupOldStdioTransports() { + const now = Date.now(); + const FIVE_MINUTES = 5 * 60 * 1000; + for (const [sessionId, entry] of stdioTransports.entries()) { + if (now - entry.lastUsed > FIVE_MINUTES) { + try { + entry.transport.close?.(); + } catch (err) { + // ignore + } + stdioTransports.delete(sessionId); + } + } +} + +export function createMCPStdioProxy( + request: Request, + command: string, + args?: string[], + options?: { + /** Enable debug logging */ + debug?: boolean; + /** Environment variables to pass to the process */ + env?: Record; + /** Custom header-to-environment variable mapping */ + headerMapping?: Record; + /** Optional session id for transport reuse */ + sessionId?: string; + } +): Promise { + return new Promise(async (resolve) => { + let bridge: any = null; + let serverTransport: StdioClientTransport | undefined; + let sessionId: string | undefined = + options?.sessionId || request.headers.get("Mcp-Session-Id") || undefined; + + // Clean up old transports before handling new connection + cleanupOldStdioTransports(); + + try { + // Extract headers from the incoming request and convert to environment variables + const env = createEnvironmentFromRequest( + request, + options?.env || {}, + options?.headerMapping || {} + ); + + // If sessionId is provided, try to reuse the transport + let entry: StdioTransportEntry | undefined; + if (sessionId) { + entry = stdioTransports.get(sessionId); + if (entry) { + serverTransport = entry.transport; + entry.lastUsed = Date.now(); + } + } + + // If no transport exists for this sessionId, create a new one and store it + if (!serverTransport) { + serverTransport = new StdioClientTransport({ + command, + args: args || [], + env, + }); + await serverTransport.start(); + if (sessionId) { + stdioTransports.set(sessionId, { + transport: serverTransport, + lastUsed: Date.now(), + }); + } + } + + // Create Remix transport (converts HTTP to MCP messages) + // We need to wrap resolve to inject the sessionId header if present + const resolveWithSessionId = (response: Response) => { + if (sessionId) { + // Clone the response and add the mcp_session_id header + const headers = new Headers(response.headers); + headers.set("mcp-session-id", sessionId); + resolve( + new Response(response.body, { + status: response.status, + statusText: response.statusText, + headers, + }) + ); + } else { + resolve(response); + } + }; + + const clientTransport = new RemixMCPTransport(request, resolveWithSessionId); + + // Bridge the transports + const bridgeOptions: any = { + debug: options?.debug || false, + onError: (error: Error, source: string) => { + console.error(`[MCP Stdio Bridge] ${source} error:`, error); + }, + }; + + if (options?.debug) { + bridgeOptions.onMessage = (direction: string, message: any) => { + console.log(`[MCP Stdio Bridge] ${direction}:`, message.method || message.id); + 
}; + } + + bridge = createMCPTransportBridge( + clientTransport as any, + serverTransport as any, + bridgeOptions + ); + + // Start only the client transport (server is already started) + await clientTransport.start(); + } catch (error) { + console.error("MCP Stdio Proxy Error:", error); + + if (bridge) { + bridge.close().catch(console.error); + } + + const errorMessage = error instanceof Error ? error.message : String(error); + // Always include mcp_session_id header if sessionId is present + const headers: Record = { "Content-Type": "application/json" }; + if (sessionId) { + headers["mcp-session-id"] = sessionId; + } + resolve( + new Response( + JSON.stringify({ + error: `Stdio proxy error: ${errorMessage}`, + }), + { + status: 500, + headers, + } + ) + ); + } + }); +} + +/** + * Creates environment variables from request headers + */ +function createEnvironmentFromRequest( + request: Request, + baseEnv: Record, + headerMapping: Record +): Record { + // Start with base environment (inherit safe environment variables) + const env: Record = { + ...getDefaultEnvironment(), + ...baseEnv, + }; + + // Add standard MCP headers as environment variables + const sessionId = request.headers.get("Mcp-Session-Id"); + const lastEventId = request.headers.get("Last-Event-Id"); + const contentType = request.headers.get("Content-Type"); + const userAgent = request.headers.get("User-Agent"); + + if (sessionId) { + env["MCP_SESSION_ID"] = sessionId; + } + if (lastEventId) { + env["MCP_LAST_EVENT_ID"] = lastEventId; + } + if (contentType) { + env["MCP_CONTENT_TYPE"] = contentType; + } + if (userAgent) { + env["MCP_USER_AGENT"] = userAgent; + } + + // Apply custom header-to-environment variable mapping + for (const [headerName, envVarName] of Object.entries(headerMapping)) { + const headerValue = request.headers.get(headerName); + if (headerValue) { + env[envVarName] = headerValue; + } + } + + return env; +} + +/** + * Returns a default environment object including only environment variables deemed safe to inherit. + */ +function getDefaultEnvironment(): Record { + const DEFAULT_INHERITED_ENV_VARS = + process.platform === "win32" + ? [ + "APPDATA", + "HOMEDRIVE", + "HOMEPATH", + "LOCALAPPDATA", + "PATH", + "PROCESSOR_ARCHITECTURE", + "SYSTEMDRIVE", + "SYSTEMROOT", + "TEMP", + "USERNAME", + "USERPROFILE", + ] + : ["HOME", "LOGNAME", "PATH", "SHELL", "TERM", "USER"]; + + const env: Record = {}; + + for (const key of DEFAULT_INHERITED_ENV_VARS) { + const value = process.env[key]; + if (value === undefined) { + continue; + } + + if (value.startsWith("()")) { + // Skip functions, which are a security risk. 
+ continue; + } + + env[key] = value; + } + + return env; +} + export class MCPAuthenticationClient { private serverUrlHash: string; private authProvider: NodeOAuthClientProvider | null = null; diff --git a/packages/mcp-proxy/src/index.ts b/packages/mcp-proxy/src/index.ts index 6ea643b..4a4e149 100644 --- a/packages/mcp-proxy/src/index.ts +++ b/packages/mcp-proxy/src/index.ts @@ -5,6 +5,7 @@ export * from "./types/index.js"; export { createMCPAuthClient, createMCPProxy, + createMCPStdioProxy, MCPAuthenticationClient, } from "./core/mcp-remote-client.js"; diff --git a/packages/mcp-proxy/src/types/remote-client.ts b/packages/mcp-proxy/src/types/remote-client.ts index 1e7f30e..977bfb5 100644 --- a/packages/mcp-proxy/src/types/remote-client.ts +++ b/packages/mcp-proxy/src/types/remote-client.ts @@ -51,7 +51,7 @@ export interface ProxyConnectionConfig { /** * Transport strategy options */ -export type TransportStrategy = "sse-only" | "http-only" | "sse-first" | "http-first"; +export type TransportStrategy = "sse-only" | "http-only" | "sse-first" | "http-first" | "stdio"; /** * Static OAuth client metadata diff --git a/packages/mcp-proxy/src/utils/mcp-transport-bridge.ts b/packages/mcp-proxy/src/utils/mcp-transport-bridge.ts index c71d575..e312e97 100644 --- a/packages/mcp-proxy/src/utils/mcp-transport-bridge.ts +++ b/packages/mcp-proxy/src/utils/mcp-transport-bridge.ts @@ -23,7 +23,7 @@ export function createMCPTransportBridge( // Forward messages from client to server clientTransport.onmessage = (message: any, extra: any) => { - console.log(JSON.stringify(message)); + console.log(message); log("[Clientβ†’Server]", message.method || message.id); onMessage?.("client-to-server", message); @@ -41,6 +41,7 @@ export function createMCPTransportBridge( // Forward messages from server to client serverTransport.onmessage = (message: any, extra: any) => { + console.log(message); console.log(JSON.stringify(message), JSON.stringify(extra)); log("[Serverβ†’Client]", message.method || message.id); onMessage?.("server-to-client", message); diff --git a/packages/mcp-proxy/src/utils/mcp-transport.ts b/packages/mcp-proxy/src/utils/mcp-transport.ts index f26d6f7..d9ed993 100644 --- a/packages/mcp-proxy/src/utils/mcp-transport.ts +++ b/packages/mcp-proxy/src/utils/mcp-transport.ts @@ -34,13 +34,24 @@ export class RemixMCPTransport implements Transport { throw new Error("Invalid JSON-RPC message"); } - // Emit the message to handler - if (this.onmessage) { - try { - this.onmessage(message); - } catch (error) { - if (this.onerror) { - this.onerror(error as Error); + if (message.method.includes("notifications")) { + this.send({}); + return; + } + + console.log(message, "message"); + + if (Object.keys(message).length === 0) { + this.send({}); + } else { + // Emit the message to handler + if (this.onmessage) { + try { + this.onmessage(message); + } catch (error) { + if (this.onerror) { + this.onerror(error as Error); + } } } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f3a8e6f..8069469 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -568,6 +568,9 @@ importers: dotenv: specifier: ^16.4.5 version: 16.5.0 + dotenv-expand: + specifier: ^12.0.2 + version: 12.0.2 esbuild: specifier: ^0.23.0 version: 0.23.1 @@ -6315,6 +6318,10 @@ packages: resolution: {integrity: sha512-GopVGCpVS1UKH75VKHGuQFqS1Gusej0z4FyQkPdwjil2gNIv+LNsqBlboOzpJFZKVT95GkCyWJbBSdFEFUWI2A==} engines: {node: '>=12'} + dotenv-expand@12.0.2: + resolution: {integrity: 
sha512-lXpXz2ZE1cea1gL4sz2Ipj8y4PiVjytYr3Ij0SWoms1PGxIv7m2CRKuRuCRtHdVuvM/hNJPMxt5PbhboNC4dPQ==} + engines: {node: '>=12'} + dotenv@16.0.3: resolution: {integrity: sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ==} engines: {node: '>=12'} @@ -17106,6 +17113,10 @@ snapshots: dotenv-expand@10.0.0: {} + dotenv-expand@12.0.2: + dependencies: + dotenv: 16.5.0 + dotenv@16.0.3: {} dotenv@16.4.7: {} diff --git a/trigger/.env.example b/trigger/.env.example index 1ab3687..78902ca 100644 --- a/trigger/.env.example +++ b/trigger/.env.example @@ -31,7 +31,7 @@ POSTGRES_USER=docker POSTGRES_PASSWORD=docker TRIGGER_DB=trigger -DB_HOST=localhost +DB_HOST=host.docker.internal DB_PORT=5432 DB_SCHEMA=sigma diff --git a/trigger/auth.htpasswd b/trigger/auth.htpasswd new file mode 100644 index 0000000..62dbff9 --- /dev/null +++ b/trigger/auth.htpasswd @@ -0,0 +1 @@ +registry-user:$2y$05$6ingYqw0.3j13dxHY4w3neMSvKhF3pvRmc0AFifScWsVA9JpuLwNK \ No newline at end of file diff --git a/trigger/docker-compose.yaml b/trigger/docker-compose.yaml index 558edb0..56bf19b 100644 --- a/trigger/docker-compose.yaml +++ b/trigger/docker-compose.yaml @@ -146,7 +146,7 @@ services: - webapp volumes: # registry-user:very-secure-indeed - - ../registry/auth.htpasswd:/auth/htpasswd:ro + - ./auth.htpasswd:/auth/htpasswd:ro environment: REGISTRY_AUTH: htpasswd REGISTRY_AUTH_HTPASSWD_REALM: Registry Realm diff --git a/turbo.json b/turbo.json index 9280d93..9787ad1 100644 --- a/turbo.json +++ b/turbo.json @@ -52,6 +52,7 @@ "SESSION_SECRET", "APP_ORIGIN", "LOGIN_ORIGIN", + "API_BASE_URL", "POSTHOG_PROJECT_KEY", "AUTH_GOOGLE_CLIENT_ID", "AUTH_GOOGLE_CLIENT_SECRET",