Fix: core cli to work with core repo

Harshith Mullapudi 2025-07-21 22:37:04 +05:30
parent c4467a2306
commit cd139f715a
35 changed files with 609 additions and 180 deletions

View File

@ -12,7 +12,7 @@ POSTGRES_PASSWORD=docker
POSTGRES_DB=core POSTGRES_DB=core
LOGIN_ORIGIN=http://localhost:3033 LOGIN_ORIGIN=http://localhost:3033
DATABASE_URL="postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB}?schema=core" DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB}?schema=core"
# This sets the URL used for direct connections to the database and should only be needed in limited circumstances # This sets the URL used for direct connections to the database and should only be needed in limited circumstances
# See: https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#fields:~:text=the%20shadow%20database.-,directUrl,-No # See: https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#fields:~:text=the%20shadow%20database.-,directUrl,-No
@ -22,6 +22,8 @@ REMIX_APP_PORT=3033
APP_ENV=production APP_ENV=production
NODE_ENV=${APP_ENV} NODE_ENV=${APP_ENV}
APP_ORIGIN=http://localhost:3033 APP_ORIGIN=http://localhost:3033
API_BASE_URL=${APP_ORIGIN}
SESSION_SECRET=27192e6432564f4788d55c15131bd5ac SESSION_SECRET=27192e6432564f4788d55c15131bd5ac
ENCRYPTION_KEY=27192e6432564f4788d55c15131bd5ac ENCRYPTION_KEY=27192e6432564f4788d55c15131bd5ac
@ -48,8 +50,8 @@ MAGIC_LINK_SECRET=27192e6432564f4788d55c15131bd5ac
NEO4J_AUTH=neo4j/27192e6432564f4788d55c15131bd5ac NEO4J_AUTH=neo4j/27192e6432564f4788d55c15131bd5ac
OLLAMA_URL=http://ollama:11434 OLLAMA_URL=http://ollama:11434
EMBEDDING_MODEL=GPT41 EMBEDDING_MODEL=text-embedding-3-small
MODEL=GPT41 MODEL=gpt-4.1-2025-04-14
## Trigger ## ## Trigger ##
TRIGGER_PROJECT_ID= TRIGGER_PROJECT_ID=
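The new `API_BASE_URL=${APP_ORIGIN}` entry relies on variable references being expanded when the file is read; the CLI changes later in this commit add `dotenv-expand` and load this same root `.env` that way. A minimal sketch of that resolution, with illustrative values only:

```ts
// Minimal sketch: resolving ${VAR} references such as API_BASE_URL=${APP_ORIGIN}
// with dotenv + dotenv-expand, mirroring how the CLI's env-docker helper (added
// later in this commit) reads the root .env. Values shown are illustrative.
import dotenv from "dotenv";
import dotenvExpand from "dotenv-expand";

const parsed =
  dotenvExpand.expand(dotenv.config({ path: ".env", processEnv: {} })).parsed ?? {};

console.log(parsed.APP_ORIGIN);   // e.g. "http://localhost:3033"
console.log(parsed.API_BASE_URL); // same value, expanded from ${APP_ORIGIN}
```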

View File

@ -51,7 +51,7 @@ export function LogsFilters({
const handleBack = () => setStep("main"); const handleBack = () => setStep("main");
return ( return (
<div className="mb-4 flex items-center gap-2"> <div className="mb-4 flex w-full items-center justify-start gap-2">
<Popover <Popover
open={popoverOpen} open={popoverOpen}
onOpenChange={(open) => { onOpenChange={(open) => {

View File

@ -153,7 +153,7 @@ export function VirtualLogsList({
const itemCount = hasMore ? logs.length + 1 : logs.length; const itemCount = hasMore ? logs.length + 1 : logs.length;
return ( return (
<div className="h-[calc(100vh_-_132px)] overflow-hidden rounded-lg"> <div className="h-full grow overflow-hidden rounded-lg">
<AutoSizer className="h-full"> <AutoSizer className="h-full">
{({ width, height: autoHeight }) => ( {({ width, height: autoHeight }) => (
<InfiniteLoader <InfiniteLoader

View File

@ -4,6 +4,8 @@ import { getIntegrationDefinitionWithSlug } from "~/services/integrationDefiniti
import { proxyRequest } from "~/utils/proxy.server"; import { proxyRequest } from "~/utils/proxy.server";
import { z } from "zod"; import { z } from "zod";
import { getIntegrationAccount } from "~/services/integrationAccount.server"; import { getIntegrationAccount } from "~/services/integrationAccount.server";
import { createMCPStdioProxy } from "@core/mcp-proxy";
import { randomUUID } from "node:crypto";
export const integrationSlugSchema = z.object({ export const integrationSlugSchema = z.object({
slug: z.string(), slug: z.string(),
@ -48,7 +50,7 @@ const { action, loader } = createActionApiRoute(
const spec = integrationDefinition.spec as any; const spec = integrationDefinition.spec as any;
if (!spec.mcpAuth) { if (!spec.mcp) {
return new Response( return new Response(
JSON.stringify({ JSON.stringify({
error: "MCP auth configuration not found for this integration", error: "MCP auth configuration not found for this integration",
@ -60,37 +62,57 @@ const { action, loader } = createActionApiRoute(
); );
} }
const { serverUrl } = spec.mcpAuth; const { url, type } = spec.mcp;
// Find the integration account for this user and integration if (type === "http") {
const integrationAccount = await getIntegrationAccount( // Find the integration account for this user and integration
integrationDefinition.id, const integrationAccount = await getIntegrationAccount(
authentication.userId, integrationDefinition.id,
); authentication.userId,
const integrationConfig =
integrationAccount?.integrationConfiguration as any;
if (!integrationAccount || !integrationConfig || !integrationConfig.mcp) {
return new Response(
JSON.stringify({
error: "No integration account with mcp config",
}),
{
status: 400,
headers: { "Content-Type": "application/json" },
},
); );
}
// Proxy the request to the serverUrl const integrationConfig =
return await proxyRequest( integrationAccount?.integrationConfiguration as any;
request,
serverUrl, if (
integrationConfig.mcp.tokens.access_token, !integrationAccount ||
); !integrationConfig ||
!integrationConfig.mcp
) {
return new Response(
JSON.stringify({
error: "No integration account with mcp config",
}),
{
status: 400,
headers: { "Content-Type": "application/json" },
},
);
}
// Proxy the request to the serverUrl
return await proxyRequest(
request,
url,
integrationConfig.mcp.tokens.access_token,
);
} else {
const { command } = spec.mcp;
// Get session_id from headers (case-insensitive), or generate a new uuid if not present
const sessionId =
request.headers.get("mcp-session-id") ||
request.headers.get("Mcp-Session-Id") ||
randomUUID();
return createMCPStdioProxy(request, "npx", ["-y", "hevy-mcp"], {
env: {
HEVY_API_KEY: "e1fa3a63-c7c2-4335-9753-042bd9028330",
},
sessionId,
});
}
} catch (error: any) { } catch (error: any) {
console.error("MCP Proxy Error:", error);
return new Response(JSON.stringify({ error: error.message }), { return new Response(JSON.stringify({ error: error.message }), {
status: 500, status: 500,
headers: { "Content-Type": "application/json" }, headers: { "Content-Type": "application/json" },

View File

@ -53,14 +53,14 @@ export async function loader({ request }: LoaderFunctionArgs) {
const spec = integrationDefinition.spec as any; const spec = integrationDefinition.spec as any;
if (!spec.mcpAuth) { if (!spec.mcp) {
throw new Error("MCP auth configuration not found for this integration"); throw new Error("MCP auth configuration not found for this integration");
} }
const { transportStrategy, serverUrl } = spec.mcpAuth; const { transportStrategy, url } = spec.mcp;
const authClient = createMCPAuthClient({ const authClient = createMCPAuthClient({
serverUrl, serverUrl: url,
transportStrategy: transportStrategy || "sse-first", transportStrategy: transportStrategy || "sse-first",
redirectUrl: MCP_CALLBACK_URL, redirectUrl: MCP_CALLBACK_URL,
}); });

View File

@ -137,7 +137,9 @@ export default function IntegrationDetail() {
); );
const hasApiKey = !!specData?.auth?.api_key; const hasApiKey = !!specData?.auth?.api_key;
const hasOAuth2 = !!specData?.auth?.OAuth2; const hasOAuth2 = !!specData?.auth?.OAuth2;
const hasMCPAuth = !!specData?.mcpAuth; const hasMCPAuth = !!(
specData?.mcp.type === "url" && specData?.mcp.needsAuth
);
const Component = getIcon(integration.icon as IconType); const Component = getIcon(integration.icon as IconType);
return ( return (

View File

@ -75,9 +75,9 @@ export default function LogsActivity() {
)} )}
{/* Logs List */} {/* Logs List */}
<div className="space-y-4"> <div className="flex h-full w-full space-y-4">
{logs.length === 0 ? ( {logs.length === 0 ? (
<Card> <Card className="bg-background-2 w-full">
<CardContent className="bg-background-2 flex items-center justify-center py-16"> <CardContent className="bg-background-2 flex items-center justify-center py-16">
<div className="text-center"> <div className="text-center">
<Activity className="text-muted-foreground mx-auto mb-4 h-12 w-12" /> <Activity className="text-muted-foreground mx-auto mb-4 h-12 w-12" />

View File

@ -45,7 +45,7 @@ export default function LogsAll() {
}, },
]} ]}
/> />
<div className="flex h-[calc(100vh_-_56px)] flex-col items-center space-y-6 p-4 px-5"> <div className="flex h-[calc(100vh_-_56px)] w-full flex-col items-center space-y-6 p-4 px-5">
{isInitialLoad ? ( {isInitialLoad ? (
<> <>
<LoaderCircle className="text-primary h-4 w-4 animate-spin" />{" "} <LoaderCircle className="text-primary h-4 w-4 animate-spin" />{" "}
@ -64,10 +64,10 @@ export default function LogsAll() {
/> />
)} )}
{/* Logs List */} {/* Logs List */}
<div className="space-y-4"> <div className="flex h-full w-full space-y-4">
{logs.length === 0 ? ( {logs.length === 0 ? (
<Card> <Card className="bg-background-2 w-full">
<CardContent className="bg-background-2 flex items-center justify-center py-16"> <CardContent className="bg-background-2 flex w-full items-center justify-center py-16">
<div className="text-center"> <div className="text-center">
<Database className="text-muted-foreground mx-auto mb-4 h-12 w-12" /> <Database className="text-muted-foreground mx-auto mb-4 h-12 w-12" />
<h3 className="mb-2 text-lg font-semibold"> <h3 className="mb-2 text-lg font-semibold">

View File

@ -158,7 +158,8 @@ export default function LoginMagicLinkPage() {
<Fieldset className="flex w-full flex-col items-center gap-y-2 px-2"> <Fieldset className="flex w-full flex-col items-center gap-y-2 px-2">
<FormButtons <FormButtons
cancelButton={ cancelButton={<></>}
confirmButton={
<Button <Button
type="submit" type="submit"
name="action" name="action"
@ -169,7 +170,6 @@ export default function LoginMagicLinkPage() {
Re-enter email Re-enter email
</Button> </Button>
} }
confirmButton={<></>}
/> />
</Fieldset> </Fieldset>
</Card> </Card>

View File

@ -199,13 +199,13 @@ export default function OAuthAuthorize() {
<p className="text-muted-foreground mb-2 text-sm">Permissions</p> <p className="text-muted-foreground mb-2 text-sm">Permissions</p>
<ul className="text-muted-foreground text-sm"> <ul className="text-muted-foreground text-sm">
{params.scope?.split(",").map((scope, index, arr) => { {params.scope?.split(" ").map((scope, index, arr) => {
const isFirst = index === 0; const isFirst = index === 0;
const isLast = index === arr.length - 1; const isLast = index === arr.length - 1;
return ( return (
<li <li
key={index} key={index}
className={`border-border flex items-center gap-2 border-x border-t p-2 ${isLast ? "border-b" : ""} ${isFirst ? "rounded-tl-md rounded-tr-md" : ""} ${isLast ? "rounded-br-md rounded-bl-md" : ""} `} className={`flex items-center gap-2 border-x border-t border-gray-300 p-2 ${isLast ? "border-b" : ""} ${isFirst ? "rounded-tl-md rounded-tr-md" : ""} ${isLast ? "rounded-br-md rounded-bl-md" : ""} `}
> >
<div>{getIcon(scope)}</div> <div>{getIcon(scope)}</div>
<div> <div>

View File

@ -278,14 +278,14 @@ export async function getRedirectURLForMCP(
const spec = integrationDefinition.spec as any; const spec = integrationDefinition.spec as any;
if (!spec.mcpAuth) { if (!spec.mcp) {
throw new Error("MCP auth configuration not found for this integration"); throw new Error("MCP auth configuration not found for this integration");
} }
const { serverUrl, transportStrategy } = spec.mcpAuth; const { url, transportStrategy } = spec.mcp;
const authClient = createMCPAuthClient({ const authClient = createMCPAuthClient({
serverUrl, serverUrl: url,
transportStrategy: transportStrategy || "sse-first", transportStrategy: transportStrategy || "sse-first",
redirectUrl: MCP_CALLBACK_URL, redirectUrl: MCP_CALLBACK_URL,
}); });

View File

@ -83,6 +83,7 @@ export class OAuth2Service {
// Validate redirect URI // Validate redirect URI
validateRedirectUri(client: any, redirectUri: string): boolean { validateRedirectUri(client: any, redirectUri: string): boolean {
console.log(redirectUri);
const allowedUris = client.redirectUris const allowedUris = client.redirectUris
.split(",") .split(",")
.map((uri: string) => uri.trim()); .map((uri: string) => uri.trim());

View File

@ -24,11 +24,14 @@ export interface OAuth2Context {
export async function requireOAuth2(request: Request): Promise<OAuth2Context> { export async function requireOAuth2(request: Request): Promise<OAuth2Context> {
const authHeader = request.headers.get("authorization"); const authHeader = request.headers.get("authorization");
if (!authHeader || !authHeader.startsWith("Bearer ")) { if (!authHeader || !authHeader.startsWith("Bearer ")) {
throw json( throw json(
{ error: "invalid_token", error_description: "Missing or invalid authorization header" }, {
{ status: 401 } error: "invalid_token",
error_description: "Missing or invalid authorization header",
},
{ status: 401 },
); );
} }
@ -36,7 +39,7 @@ export async function requireOAuth2(request: Request): Promise<OAuth2Context> {
try { try {
const accessToken = await oauth2Service.validateAccessToken(token); const accessToken = await oauth2Service.validateAccessToken(token);
return { return {
user: { user: {
id: accessToken.user.id, id: accessToken.user.id,
@ -59,13 +62,18 @@ export async function requireOAuth2(request: Request): Promise<OAuth2Context> {
}; };
} catch (error) { } catch (error) {
throw json( throw json(
{ error: "invalid_token", error_description: "Invalid or expired access token" }, {
{ status: 401 } error: "invalid_token",
error_description: "Invalid or expired access token",
},
{ status: 401 },
); );
} }
} }
export async function getOAuth2Context(request: Request): Promise<OAuth2Context | null> { export async function getOAuth2Context(
request: Request,
): Promise<OAuth2Context | null> {
try { try {
return await requireOAuth2(request); return await requireOAuth2(request);
} catch (error) { } catch (error) {
@ -73,20 +81,31 @@ export async function getOAuth2Context(request: Request): Promise<OAuth2Context
} }
} }
export function hasScope(context: OAuth2Context, requiredScope: string): boolean { export function hasScope(
context: OAuth2Context,
requiredScope: string,
): boolean {
if (!context.token.scope) { if (!context.token.scope) {
return false; return false;
} }
const scopes = context.token.scope.split(' '); const scopes = context.token.scope.split(" ");
return scopes.includes(requiredScope); return scopes.includes(requiredScope);
} }
export function requireScope(context: OAuth2Context, requiredScope: string): void { export function requireScope(
context: OAuth2Context,
requiredScope: string,
): void {
if (!hasScope(context, requiredScope)) { if (!hasScope(context, requiredScope)) {
throw json( throw json(
{ error: "insufficient_scope", error_description: `Required scope: ${requiredScope}` }, {
{ status: 403 } error: "insufficient_scope",
error_description: `Required scope: ${requiredScope}`,
},
{ status: 403 },
); );
} }
} }
export function getEnvForCommand() {}
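The helpers above are mostly reformatted in place; `hasScope`/`requireScope` behave as before. A short usage sketch in a hypothetical resource route (the route, module path, and `logs:read` scope are illustrative, not from this repo):

```ts
// Hypothetical usage of requireOAuth2 + requireScope in a Remix resource route.
import { json, type LoaderFunctionArgs } from "@remix-run/node";
import { requireOAuth2, requireScope } from "~/services/oauth2.server"; // assumed path

export async function loader({ request }: LoaderFunctionArgs) {
  // Throws a 401 JSON response when the Bearer token is missing or invalid.
  const ctx = await requireOAuth2(request);

  // Throws a 403 JSON response when the token lacks the required scope.
  requireScope(ctx, "logs:read");

  return json({ userId: ctx.user.id, scope: ctx.token.scope });
}
```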

View File

@ -10,8 +10,8 @@
"lint:fix": "eslint --fix --ignore-path .gitignore --cache --cache-location ./node_modules/.cache/eslint .", "lint:fix": "eslint --fix --ignore-path .gitignore --cache --cache-location ./node_modules/.cache/eslint .",
"start": "remix-serve ./build/server/index.js", "start": "remix-serve ./build/server/index.js",
"typecheck": "tsc", "typecheck": "tsc",
"trigger:dev": "pnpm dlx trigger.dev@v4-beta dev", "trigger:dev": "pnpm dlx trigger.dev@4.0.0-v4-beta.22 dev",
"trigger:deploy": "pnpm dlx trigger.dev@v4-beta deploy" "trigger:deploy": "pnpm dlx trigger.dev@4.0.0-v4-beta.22 deploy"
}, },
"dependencies": { "dependencies": {
"@ai-sdk/anthropic": "^1.2.12", "@ai-sdk/anthropic": "^1.2.12",

View File

@ -27,8 +27,16 @@ export default defineConfig({
build: { build: {
extensions: [ extensions: [
syncEnvVars(() => ({ syncEnvVars(() => ({
DATABASE_URL: process.env.DATABASE_URL as string, ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY as string,
API_BASE_URL: process.env.API_BASE_URL as string, API_BASE_URL: process.env.API_BASE_URL as string,
DATABASE_URL: process.env.DATABASE_URL as string,
EMBEDDING_MODEL: process.env.EMBEDDING_MODEL as string,
ENCRYPTION_KEY: process.env.ENCRYPTION_KEY as string,
MODEL: process.env.MODEL ?? "gpt-4.1-2025-04-14",
NEO4J_PASSWORD: process.env.NEO4J_PASSWORD as string,
NEO4J_URI: process.env.NEO4J_URI as string,
NEO4J_USERNAME: process.env.NEO4J_USERNAME as string,
OPENAI_API_KEY: process.env.OPENAI_API_KEY as string,
})), })),
prismaExtension({ prismaExtension({
schema: "prisma/schema.prisma", schema: "prisma/schema.prisma",

View File

@ -40,7 +40,7 @@ services:
postgres: postgres:
container_name: core-postgres container_name: core-postgres
image: redplanethq/postgres:0.1.0 image: redplanethq/postgres:0.1.2
environment: environment:
- POSTGRES_USER=${POSTGRES_USER} - POSTGRES_USER=${POSTGRES_USER}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}

View File

@ -12,9 +12,9 @@
"label": "Linear API Key" "label": "Linear API Key"
} }
}, },
"mcpAuth": { "mcp": {
"serverUrl": "https://mcp.linear.app/sse", "type": "http",
"transportStrategy": "sse-first", "url": "https://mcp.linear.app/mcp",
"needsSeparateAuth": true "needsAuth": true
} }
} }

View File

@ -4,12 +4,11 @@
"description": "Connect your workspace to Slack. Run your workflows from slack bookmarks", "description": "Connect your workspace to Slack. Run your workflows from slack bookmarks",
"icon": "slack", "icon": "slack",
"mcp": { "mcp": {
"type": "stdio",
"command": "npx", "command": "npx",
"args": ["-y", "@modelcontextprotocol/server-slack"], "args": [ "-y", "@modelcontextprotocol/server-slack" ],
"env": { "env": {
"SLACK_BOT_TOKEN": "${config:access_token}", "SLACK_BOT_TOKEN": "${config:access_token}"
"SLACK_TEAM_ID": "${config:team_id}",
"SLACK_CHANNEL_IDS": "${config:channel_ids}"
} }
}, },
"auth": { "auth": {

View File

@ -1,6 +1,6 @@
{ {
"name": "@redplanethq/core", "name": "@redplanethq/core",
"version": "0.1.1", "version": "0.1.2",
"description": "A Command-Line Interface for Core", "description": "A Command-Line Interface for Core",
"type": "module", "type": "module",
"license": "MIT", "license": "MIT",
@ -89,6 +89,7 @@
"commander": "^9.4.1", "commander": "^9.4.1",
"defu": "^6.1.4", "defu": "^6.1.4",
"dotenv": "^16.4.5", "dotenv": "^16.4.5",
"dotenv-expand": "^12.0.2",
"esbuild": "^0.23.0", "esbuild": "^0.23.0",
"eventsource": "^3.0.2", "eventsource": "^3.0.2",
"evt": "^2.4.13", "evt": "^2.4.13",

View File

@ -1,10 +1,11 @@
import { intro, outro, text, confirm, spinner, note, log } from "@clack/prompts"; import { intro, outro, text, confirm, spinner, note, log } from "@clack/prompts";
import { fileExists, updateEnvFile } from "../utils/file.js"; import { fileExists, updateEnvFile } from "../utils/file.js";
import { checkPostgresHealth } from "../utils/docker.js"; import { checkPostgresHealth } from "../utils/docker.js";
import { executeDockerCommandInteractive } from "../utils/docker-interactive.js"; import { executeCommandInteractive } from "../utils/docker-interactive.js";
import { printCoreBrainLogo } from "../utils/ascii.js"; import { printCoreBrainLogo } from "../utils/ascii.js";
import { setupEnvFile } from "../utils/env.js"; import { setupEnvFile } from "../utils/env.js";
import { hasTriggerConfig } from "../utils/env-checker.js"; import { hasTriggerConfig } from "../utils/env-checker.js";
import { getDockerCompatibleEnvVars } from "../utils/env-docker.js";
import path from "path"; import path from "path";
export async function initCommand() { export async function initCommand() {
@ -14,20 +15,27 @@ export async function initCommand() {
intro("🚀 Core Development Environment Setup"); intro("🚀 Core Development Environment Setup");
// Step 1: Confirm this is the Core repository // Step 1: Confirm this is the Core repository
note("Please ensure you have:\n• Docker and Docker Compose installed\n• Git installed\n• pnpm package manager installed\n• You are in the Core repository directory", "📋 Prerequisites"); note(
"Please ensure you have:\n• Docker and Docker Compose installed\n• Git installed\n• pnpm package manager installed\n• You are in the Core repository directory",
"📋 Prerequisites"
);
const isCoreRepo = await confirm({ const isCoreRepo = await confirm({
message: "Are you currently in the Core repository directory?", message: "Are you currently in the Core repository directory?",
}); });
if (!isCoreRepo) { if (!isCoreRepo) {
note("Please clone the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run 'core init' again.", "📥 Clone Repository"); note(
"Please clone the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run 'core init' again.",
"📥 Clone Repository"
);
outro("❌ Setup cancelled. Please navigate to the Core repository first."); outro("❌ Setup cancelled. Please navigate to the Core repository first.");
process.exit(1); process.exit(1);
} }
const rootDir = process.cwd(); const rootDir = process.cwd();
const triggerDir = path.join(rootDir, "trigger"); const triggerDir = path.join(rootDir, "trigger");
const webappDir = path.join(rootDir, "apps", "webapp");
try { try {
// Step 2: Setup .env file in root // Step 2: Setup .env file in root
@ -51,7 +59,7 @@ export async function initCommand() {
// Step 3: Docker compose up -d in root // Step 3: Docker compose up -d in root
try { try {
await executeDockerCommandInteractive("docker compose up -d", { await executeCommandInteractive("docker compose up -d", {
cwd: rootDir, cwd: rootDir,
message: "Starting Docker containers in root...", message: "Starting Docker containers in root...",
showOutput: true, showOutput: true,
@ -103,7 +111,7 @@ export async function initCommand() {
// Step 6: Docker compose up for trigger // Step 6: Docker compose up for trigger
try { try {
await executeDockerCommandInteractive("docker compose up -d", { await executeCommandInteractive("docker compose up -d", {
cwd: triggerDir, cwd: triggerDir,
message: "Starting Trigger.dev containers...", message: "Starting Trigger.dev containers...",
showOutput: true, showOutput: true,
@ -175,6 +183,16 @@ export async function initCommand() {
}, },
}); });
const openaiApiKey = await text({
message: "Enter your OpenAI API Key:",
validate: (value) => {
if (!value || value.length === 0) {
return "OpenAI API Key is required";
}
return;
},
});
// Step 11: Update .env with project details // Step 11: Update .env with project details
const s6 = spinner(); const s6 = spinner();
s6.start("Updating .env with Trigger.dev configuration..."); s6.start("Updating .env with Trigger.dev configuration...");
@ -182,6 +200,7 @@ export async function initCommand() {
try { try {
await updateEnvFile(envPath, "TRIGGER_PROJECT_ID", projectId as string); await updateEnvFile(envPath, "TRIGGER_PROJECT_ID", projectId as string);
await updateEnvFile(envPath, "TRIGGER_SECRET_KEY", secretKey as string); await updateEnvFile(envPath, "TRIGGER_SECRET_KEY", secretKey as string);
await updateEnvFile(envPath, "OPENAI_API_KEY", openaiApiKey as string);
s6.stop("✅ Updated .env with Trigger.dev configuration"); s6.stop("✅ Updated .env with Trigger.dev configuration");
} catch (error: any) { } catch (error: any) {
s6.stop("❌ Failed to update .env file"); s6.stop("❌ Failed to update .env file");
@ -190,13 +209,13 @@ export async function initCommand() {
// Step 12: Restart root docker-compose with new configuration // Step 12: Restart root docker-compose with new configuration
try { try {
await executeDockerCommandInteractive("docker compose down", { await executeCommandInteractive("docker compose down", {
cwd: rootDir, cwd: rootDir,
message: "Stopping Core services...", message: "Stopping Core services...",
showOutput: true, showOutput: true,
}); });
await executeDockerCommandInteractive("docker compose up -d", { await executeCommandInteractive("docker compose up -d", {
cwd: rootDir, cwd: rootDir,
message: "Starting Core services with new Trigger.dev configuration...", message: "Starting Core services with new Trigger.dev configuration...",
showOutput: true, showOutput: true,
@ -226,10 +245,10 @@ export async function initCommand() {
const dockerRegistryPassword = getEnvValue("DOCKER_REGISTRY_PASSWORD"); const dockerRegistryPassword = getEnvValue("DOCKER_REGISTRY_PASSWORD");
log.info( log.info(
`docker login ${dockerRegistryUrl} -u ${dockerRegistryUsername} -p ${dockerRegistryPassword}` `docker login -u ${dockerRegistryUsername} -p ${dockerRegistryPassword} ${dockerRegistryUrl} `
); );
} catch (error) { } catch (error) {
log.info("docker login <REGISTRY_URL> -u <USERNAME> -p <PASSWORD>"); log.info("docker login -u <USERNAME> -p <PASSWORD> <REGISTRY_URL>");
} }
const dockerLoginConfirmed = await confirm({ const dockerLoginConfirmed = await confirm({
@ -249,8 +268,8 @@ export async function initCommand() {
try { try {
// Login to trigger.dev CLI // Login to trigger.dev CLI
await executeDockerCommandInteractive( await executeCommandInteractive(
"npx -y trigger.dev@v4-beta login -a http://localhost:8030", "npx -y trigger.dev@4.0.0-v4-beta.22 login -a http://localhost:8030",
{ {
cwd: rootDir, cwd: rootDir,
message: "Logging in to Trigger.dev CLI...", message: "Logging in to Trigger.dev CLI...",
@ -258,11 +277,27 @@ export async function initCommand() {
} }
); );
// Deploy trigger tasks await executeCommandInteractive("pnpm install", {
await executeDockerCommandInteractive("pnpm trigger:deploy", {
cwd: rootDir, cwd: rootDir,
message: "Running package installation",
showOutput: true,
});
await executeCommandInteractive("pnpm build --filter=@core/types --filter=@core/database", {
cwd: rootDir,
message: "Building @core/types and @core/database with turbo...",
showOutput: true,
});
// Deploy trigger tasks
const envVars = await getDockerCompatibleEnvVars(rootDir);
console.log(envVars);
await executeCommandInteractive("pnpm run trigger:deploy", {
cwd: webappDir,
message: "Deploying Trigger.dev tasks...", message: "Deploying Trigger.dev tasks...",
showOutput: true, showOutput: true,
env: envVars,
}); });
log.success("Trigger.dev tasks deployed successfully!"); log.success("Trigger.dev tasks deployed successfully!");

View File

@ -1,50 +1,55 @@
import { intro, outro, note, log, confirm } from '@clack/prompts'; import { intro, outro, note, log, confirm } from "@clack/prompts";
import { executeDockerCommandInteractive } from '../utils/docker-interactive.js'; import { executeCommandInteractive } from "../utils/docker-interactive.js";
import { printCoreBrainLogo } from '../utils/ascii.js'; import { printCoreBrainLogo } from "../utils/ascii.js";
import path from 'path'; import path from "path";
export async function startCommand() { export async function startCommand() {
// Display the CORE brain logo // Display the CORE brain logo
printCoreBrainLogo(); printCoreBrainLogo();
intro('🚀 Starting Core Development Environment'); intro("🚀 Starting Core Development Environment");
// Step 1: Confirm this is the Core repository // Step 1: Confirm this is the Core repository
const isCoreRepo = await confirm({ const isCoreRepo = await confirm({
message: 'Are you currently in the Core repository directory?', message: "Are you currently in the Core repository directory?",
}); });
if (!isCoreRepo) { if (!isCoreRepo) {
note('Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core start" again.', '📥 Core Repository Required'); note(
outro('❌ Please navigate to the Core repository first.'); 'Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core start" again.',
"📥 Core Repository Required"
);
outro("❌ Please navigate to the Core repository first.");
process.exit(1); process.exit(1);
} }
const rootDir = process.cwd(); const rootDir = process.cwd();
const triggerDir = path.join(rootDir, 'trigger'); const triggerDir = path.join(rootDir, "trigger");
try { try {
// Start main services // Start main services
await executeDockerCommandInteractive('docker compose up -d', { await executeCommandInteractive("docker compose up -d", {
cwd: rootDir, cwd: rootDir,
message: 'Starting Core services...', message: "Starting Core services...",
showOutput: true showOutput: true,
}); });
// Start trigger services // Start trigger services
await executeDockerCommandInteractive('docker compose up -d', { await executeCommandInteractive("docker compose up -d", {
cwd: triggerDir, cwd: triggerDir,
message: 'Starting Trigger.dev services...', message: "Starting Trigger.dev services...",
showOutput: true showOutput: true,
}); });
// Final success message // Final success message
outro('🎉 Core Development Environment Started!'); outro("🎉 Core Development Environment Started!");
note('• Core Application: http://localhost:3033\n• Trigger.dev: http://localhost:8030\n• PostgreSQL: localhost:5432', '🌐 Your services are now running'); note(
log.success('Happy coding!'); "• Core Application: http://localhost:3033\n• Trigger.dev: http://localhost:8030\n• PostgreSQL: localhost:5432",
"🌐 Your services are now running"
);
log.success("Happy coding!");
} catch (error: any) { } catch (error: any) {
outro(`❌ Failed to start services: ${error.message}`); outro(`❌ Failed to start services: ${error.message}`);
process.exit(1); process.exit(1);
} }
} }

View File

@ -1,50 +1,52 @@
import { intro, outro, log, confirm, note } from '@clack/prompts'; import { intro, outro, log, confirm, note } from "@clack/prompts";
import { executeDockerCommandInteractive } from '../utils/docker-interactive.js'; import { executeCommandInteractive } from "../utils/docker-interactive.js";
import { printCoreBrainLogo } from '../utils/ascii.js'; import { printCoreBrainLogo } from "../utils/ascii.js";
import path from 'path'; import path from "path";
export async function stopCommand() { export async function stopCommand() {
// Display the CORE brain logo // Display the CORE brain logo
printCoreBrainLogo(); printCoreBrainLogo();
intro('🛑 Stopping Core Development Environment'); intro("🛑 Stopping Core Development Environment");
// Step 1: Confirm this is the Core repository // Step 1: Confirm this is the Core repository
const isCoreRepo = await confirm({ const isCoreRepo = await confirm({
message: 'Are you currently in the Core repository directory?', message: "Are you currently in the Core repository directory?",
}); });
if (!isCoreRepo) { if (!isCoreRepo) {
note('Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core stop" again.', '📥 Core Repository Required'); note(
outro('❌ Please navigate to the Core repository first.'); 'Please navigate to the Core repository first:\n\ngit clone https://github.com/redplanethq/core.git\ncd core\n\nThen run "core stop" again.',
"📥 Core Repository Required"
);
outro("❌ Please navigate to the Core repository first.");
process.exit(1); process.exit(1);
} }
const rootDir = process.cwd(); const rootDir = process.cwd();
const triggerDir = path.join(rootDir, 'trigger'); const triggerDir = path.join(rootDir, "trigger");
try { try {
// Stop trigger services first // Stop trigger services first
await executeDockerCommandInteractive('docker compose down', { await executeCommandInteractive("docker compose down", {
cwd: triggerDir, cwd: triggerDir,
message: 'Stopping Trigger.dev services...', message: "Stopping Trigger.dev services...",
showOutput: true showOutput: true,
}); });
// Stop main services // Stop main services
await executeDockerCommandInteractive('docker compose down', { await executeCommandInteractive("docker compose down", {
cwd: rootDir, cwd: rootDir,
message: 'Stopping Core services...', message: "Stopping Core services...",
showOutput: true showOutput: true,
}); });
// Final success message // Final success message
outro('🎉 Core Development Environment Stopped!'); outro("🎉 Core Development Environment Stopped!");
log.success('All services have been stopped.'); log.success("All services have been stopped.");
log.info('Run "core start" to start services again.'); log.info('Run "core start" to start services again.');
} catch (error: any) { } catch (error: any) {
outro(`❌ Failed to stop services: ${error.message}`); outro(`❌ Failed to stop services: ${error.message}`);
process.exit(1); process.exit(1);
} }
} }

View File

@ -1,46 +1,45 @@
import { spawn, ChildProcess } from 'child_process'; import { spawn, ChildProcess } from "child_process";
import { spinner } from '@clack/prompts'; import { spinner } from "@clack/prompts";
export interface DockerCommandOptions { export interface CommandOptions {
cwd: string; cwd: string;
message: string; message: string;
showOutput?: boolean; showOutput?: boolean;
env?: Record<string, string>;
} }
export function executeDockerCommandInteractive( export function executeCommandInteractive(command: string, options: CommandOptions): Promise<void> {
command: string,
options: DockerCommandOptions
): Promise<void> {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const s = spinner(); const s = spinner();
s.start(options.message); s.start(options.message);
// Split command into parts // Split command into parts
const parts = command.split(' '); const parts = command.split(" ");
const cmd = parts[0]; const cmd = parts[0];
const args = parts.slice(1); const args = parts.slice(1);
if (!cmd) { if (!cmd) {
reject(new Error('Invalid command')); reject(new Error("Invalid command"));
return; return;
} }
const child: ChildProcess = spawn(cmd, args, { const child: ChildProcess = spawn(cmd, args, {
cwd: options.cwd, cwd: options.cwd,
stdio: options.showOutput ? ['ignore', 'pipe', 'pipe'] : 'ignore', stdio: options.showOutput ? ["ignore", "pipe", "pipe"] : "ignore",
detached: false detached: false,
env: options.env ? { ...process.env, ...options.env } : { ...process.env },
}); });
let output = ''; let output = "";
// Handle stdout // Handle stdout
if (child.stdout && options.showOutput) { if (child.stdout && options.showOutput) {
child.stdout.on('data', (data: Buffer) => { child.stdout.on("data", (data: Buffer) => {
const text = data.toString(); const text = data.toString();
output += text; output += text;
// Update spinner with latest output line // Update spinner with latest output line
const lines = text.trim().split('\n'); const lines = text.trim().split("\n");
const lastLine = lines[lines.length - 1]; const lastLine = lines[lines.length - 1];
if (lastLine && lastLine.trim()) { if (lastLine && lastLine.trim()) {
s.message(`${options.message}\n${lastLine.trim()}`); s.message(`${options.message}\n${lastLine.trim()}`);
@ -50,12 +49,13 @@ export function executeDockerCommandInteractive(
// Handle stderr // Handle stderr
if (child.stderr && options.showOutput) { if (child.stderr && options.showOutput) {
child.stderr.on('data', (data: Buffer) => { child.stderr.on("data", (data: Buffer) => {
const text = data.toString(); const text = data.toString();
output += text; output += text;
// console.log(text);
// Update spinner with error output // Update spinner with error output
const lines = text.trim().split('\n'); const lines = text.trim().split("\n");
const lastLine = lines[lines.length - 1]; const lastLine = lines[lines.length - 1];
if (lastLine && lastLine.trim()) { if (lastLine && lastLine.trim()) {
s.message(`${options.message}\n❌ ${lastLine.trim()}`); s.message(`${options.message}\n❌ ${lastLine.trim()}`);
@ -64,14 +64,14 @@ export function executeDockerCommandInteractive(
} }
// Handle process exit // Handle process exit
child.on('exit', (code: number | null) => { child.on("exit", (code: number | null) => {
if (code === 0) { if (code === 0) {
s.stop(`${options.message.replace(/\.\.\.$/, '')} completed`); s.stop(`${options.message.replace(/\.\.\.$/, "")} completed`);
resolve(); resolve();
} else { } else {
s.stop(`${options.message.replace(/\.\.\.$/, '')} failed (exit code: ${code})`); s.stop(`${options.message.replace(/\.\.\.$/, "")} failed (exit code: ${code})`);
if (options.showOutput && output) { if (options.showOutput && output) {
console.log('\nOutput:'); console.log("\nOutput:");
console.log(output); console.log(output);
} }
reject(new Error(`Command failed with exit code ${code}`)); reject(new Error(`Command failed with exit code ${code}`));
@ -79,30 +79,30 @@ export function executeDockerCommandInteractive(
}); });
// Handle errors // Handle errors
child.on('error', (error: Error) => { child.on("error", (error: Error) => {
s.stop(`${options.message.replace(/\.\.\.$/, '')} failed`); s.stop(`${options.message.replace(/\.\.\.$/, "")} failed`);
reject(error); reject(error);
}); });
// Handle Ctrl+C // Handle Ctrl+C
const handleSigint = () => { const handleSigint = () => {
s.stop(`⏹️ ${options.message.replace(/\.\.\.$/, '')} interrupted`); s.stop(`⏹️ ${options.message.replace(/\.\.\.$/, "")} interrupted`);
child.kill('SIGTERM'); child.kill("SIGTERM");
// Give the process time to clean up // Give the process time to clean up
setTimeout(() => { setTimeout(() => {
if (child.killed === false) { if (child.killed === false) {
child.kill('SIGKILL'); child.kill("SIGKILL");
} }
process.exit(130); // Standard exit code for SIGINT process.exit(130); // Standard exit code for SIGINT
}, 5000); }, 5000);
}; };
process.on('SIGINT', handleSigint); process.on("SIGINT", handleSigint);
// Clean up event listener when done // Clean up event listener when done
child.on('exit', () => { child.on("exit", () => {
process.off('SIGINT', handleSigint); process.off("SIGINT", handleSigint);
}); });
}); });
} }

View File

@ -0,0 +1,49 @@
import path from "path";
import fs from "fs";
import dotenv from "dotenv";
import dotenvExpand from "dotenv-expand";
/**
* Reads environment variables from .env file and replaces localhost URLs with host.docker.internal
* for Docker container compatibility
*/
export async function getDockerCompatibleEnvVars(rootDir: string): Promise<Record<string, string>> {
const envPath = path.join(rootDir, ".env");
try {
// Use dotenv to parse and expand variables
const envVarsExpand =
dotenvExpand.expand(dotenv.config({ path: envPath, processEnv: {} })).parsed || {};
console.log(JSON.stringify(envVarsExpand));
const getEnvValue = (key: string): string => {
return envVarsExpand[key] || "";
};
const replaceLocalhostWithDockerHost = (value: string): string => {
return value
.replace(/localhost/g, "host.docker.internal")
.replace(/127\.0\.0\.1/g, "host.docker.internal");
};
// Get all required environment variables
const envVars = {
ANTHROPIC_API_KEY: getEnvValue("ANTHROPIC_API_KEY"),
API_BASE_URL: replaceLocalhostWithDockerHost(getEnvValue("API_BASE_URL")),
DATABASE_URL: replaceLocalhostWithDockerHost(getEnvValue("DATABASE_URL")),
EMBEDDING_MODEL: getEnvValue("EMBEDDING_MODEL"),
ENCRYPTION_KEY: getEnvValue("ENCRYPTION_KEY"),
MODEL: getEnvValue("MODEL") || "gpt-4.1-2025-04-14",
NEO4J_PASSWORD: getEnvValue("NEO4J_PASSWORD"),
NEO4J_URI: replaceLocalhostWithDockerHost(getEnvValue("NEO4J_URI")),
NEO4J_USERNAME: getEnvValue("NEO4J_USERNAME"),
OPENAI_API_KEY: getEnvValue("OPENAI_API_KEY"),
TRIGGER_PROJECT_ID: getEnvValue("TRIGGER_PROJECT_ID"),
};
return envVars;
} catch (error) {
throw new Error(`Failed to read .env file: ${error}`);
}
}
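The localhost rewriting is the point of this helper: the values are handed to processes that run inside containers, where `localhost` would resolve to the container itself. A quick illustration with made-up values (the relative import path matches how init.ts uses it):

```ts
// Illustration of the localhost rewrite (values are made up).
import { getDockerCompatibleEnvVars } from "../utils/env-docker.js";

const env = await getDockerCompatibleEnvVars(process.cwd());

// If .env contains:
//   DATABASE_URL=postgresql://docker:docker@localhost:5432/core?schema=core
// then env.DATABASE_URL is:
//   postgresql://docker:docker@host.docker.internal:5432/core?schema=core
console.log(env.DATABASE_URL);
```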

View File

@ -21,7 +21,7 @@
"db:studio": "prisma studio", "db:studio": "prisma studio",
"db:reset": "prisma migrate reset", "db:reset": "prisma migrate reset",
"typecheck": "tsc --noEmit", "typecheck": "tsc --noEmit",
"build": "pnpm run clean && tsc --noEmit false --outDir dist --declaration", "build": "pnpm run generate && pnpm run clean && tsc --noEmit false --outDir dist --declaration",
"dev": "tsc --noEmit false --outDir dist --declaration --watch" "dev": "tsc --noEmit false --outDir dist --declaration --watch"
} }
} }

View File

@ -1,6 +1,7 @@
import { Client } from "@modelcontextprotocol/sdk/client/index.js"; import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js"; import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js";
import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js"; import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";
import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";
import { import {
MCPRemoteClientConfig, MCPRemoteClientConfig,
AuthenticationResult, AuthenticationResult,
@ -153,7 +154,7 @@ export function createMCPProxy(
redirectUrl: string, redirectUrl: string,
transportStrategy: TransportStrategy = "sse-first", transportStrategy: TransportStrategy = "sse-first",
clientHeaders?: { sessionId?: string | null; lastEventId?: string | null } clientHeaders?: { sessionId?: string | null; lastEventId?: string | null }
): Promise<SSEClientTransport | StreamableHTTPClientTransport> { ): Promise<SSEClientTransport | StreamableHTTPClientTransport | StdioClientTransport> {
// Create auth provider with stored credentials using common factory // Create auth provider with stored credentials using common factory
const authProvider = await createAuthProviderForProxy(serverUrl, credentials, redirectUrl); const authProvider = await createAuthProviderForProxy(serverUrl, credentials, redirectUrl);
@ -173,9 +174,16 @@ export function createMCPProxy(
} }
// Create transport based on strategy (don't start yet) // Create transport based on strategy (don't start yet)
let transport: SSEClientTransport | StreamableHTTPClientTransport; let transport: SSEClientTransport | StreamableHTTPClientTransport | StdioClientTransport;
switch (transportStrategy) { switch (transportStrategy) {
case "stdio":
// For stdio transport, serverUrl should contain the command to execute
// This is mainly for completeness - prefer using createMCPStdioProxy directly
throw new Error(
"Stdio transport not supported in createRemoteTransport. Use createMCPStdioProxy instead."
);
case "sse-only": case "sse-only":
transport = new SSEClientTransport(url, { transport = new SSEClientTransport(url, {
authProvider, authProvider,
@ -227,6 +235,257 @@ export function createMCPProxy(
} }
} }
/**
* Creates an MCP proxy that forwards requests to a stdio process.
* Maintains a mapping of sessionId -> StdioClientTransport for reuse.
* If sessionId is provided, it is returned in the response header as mcp_session_id.
* @param request The incoming HTTP request
* @param command The command to execute for the stdio process
* @param args Arguments for the command
* @param options Optional configuration for the proxy
* @param sessionId Optional session id for transport reuse
* @returns Promise that resolves to the HTTP response
*/
// Track both the transport and its last used timestamp
type StdioTransportEntry = {
transport: StdioClientTransport;
lastUsed: number; // ms since epoch
};
const stdioTransports: Map<string, StdioTransportEntry> = new Map();
/**
* Cleans up any stdio transports that have not been used in the last 5 minutes.
* Closes and removes them from the map.
*/
function cleanupOldStdioTransports() {
const now = Date.now();
const FIVE_MINUTES = 5 * 60 * 1000;
for (const [sessionId, entry] of stdioTransports.entries()) {
if (now - entry.lastUsed > FIVE_MINUTES) {
try {
entry.transport.close?.();
} catch (err) {
// ignore
}
stdioTransports.delete(sessionId);
}
}
}
export function createMCPStdioProxy(
request: Request,
command: string,
args?: string[],
options?: {
/** Enable debug logging */
debug?: boolean;
/** Environment variables to pass to the process */
env?: Record<string, string>;
/** Custom header-to-environment variable mapping */
headerMapping?: Record<string, string>;
/** Optional session id for transport reuse */
sessionId?: string;
}
): Promise<Response> {
return new Promise<Response>(async (resolve) => {
let bridge: any = null;
let serverTransport: StdioClientTransport | undefined;
let sessionId: string | undefined =
options?.sessionId || request.headers.get("Mcp-Session-Id") || undefined;
// Clean up old transports before handling new connection
cleanupOldStdioTransports();
try {
// Extract headers from the incoming request and convert to environment variables
const env = createEnvironmentFromRequest(
request,
options?.env || {},
options?.headerMapping || {}
);
// If sessionId is provided, try to reuse the transport
let entry: StdioTransportEntry | undefined;
if (sessionId) {
entry = stdioTransports.get(sessionId);
if (entry) {
serverTransport = entry.transport;
entry.lastUsed = Date.now();
}
}
// If no transport exists for this sessionId, create a new one and store it
if (!serverTransport) {
serverTransport = new StdioClientTransport({
command,
args: args || [],
env,
});
await serverTransport.start();
if (sessionId) {
stdioTransports.set(sessionId, {
transport: serverTransport,
lastUsed: Date.now(),
});
}
}
// Create Remix transport (converts HTTP to MCP messages)
// We need to wrap resolve to inject the sessionId header if present
const resolveWithSessionId = (response: Response) => {
if (sessionId) {
// Clone the response and add the mcp_session_id header
const headers = new Headers(response.headers);
headers.set("mcp-session-id", sessionId);
resolve(
new Response(response.body, {
status: response.status,
statusText: response.statusText,
headers,
})
);
} else {
resolve(response);
}
};
const clientTransport = new RemixMCPTransport(request, resolveWithSessionId);
// Bridge the transports
const bridgeOptions: any = {
debug: options?.debug || false,
onError: (error: Error, source: string) => {
console.error(`[MCP Stdio Bridge] ${source} error:`, error);
},
};
if (options?.debug) {
bridgeOptions.onMessage = (direction: string, message: any) => {
console.log(`[MCP Stdio Bridge] ${direction}:`, message.method || message.id);
};
}
bridge = createMCPTransportBridge(
clientTransport as any,
serverTransport as any,
bridgeOptions
);
// Start only the client transport (server is already started)
await clientTransport.start();
} catch (error) {
console.error("MCP Stdio Proxy Error:", error);
if (bridge) {
bridge.close().catch(console.error);
}
const errorMessage = error instanceof Error ? error.message : String(error);
// Always include mcp_session_id header if sessionId is present
const headers: Record<string, string> = { "Content-Type": "application/json" };
if (sessionId) {
headers["mcp-session-id"] = sessionId;
}
resolve(
new Response(
JSON.stringify({
error: `Stdio proxy error: ${errorMessage}`,
}),
{
status: 500,
headers,
}
)
);
}
});
}
/**
* Creates environment variables from request headers
*/
function createEnvironmentFromRequest(
request: Request,
baseEnv: Record<string, string>,
headerMapping: Record<string, string>
): Record<string, string> {
// Start with base environment (inherit safe environment variables)
const env: Record<string, string> = {
...getDefaultEnvironment(),
...baseEnv,
};
// Add standard MCP headers as environment variables
const sessionId = request.headers.get("Mcp-Session-Id");
const lastEventId = request.headers.get("Last-Event-Id");
const contentType = request.headers.get("Content-Type");
const userAgent = request.headers.get("User-Agent");
if (sessionId) {
env["MCP_SESSION_ID"] = sessionId;
}
if (lastEventId) {
env["MCP_LAST_EVENT_ID"] = lastEventId;
}
if (contentType) {
env["MCP_CONTENT_TYPE"] = contentType;
}
if (userAgent) {
env["MCP_USER_AGENT"] = userAgent;
}
// Apply custom header-to-environment variable mapping
for (const [headerName, envVarName] of Object.entries(headerMapping)) {
const headerValue = request.headers.get(headerName);
if (headerValue) {
env[envVarName] = headerValue;
}
}
return env;
}
/**
* Returns a default environment object including only environment variables deemed safe to inherit.
*/
function getDefaultEnvironment(): Record<string, string> {
const DEFAULT_INHERITED_ENV_VARS =
process.platform === "win32"
? [
"APPDATA",
"HOMEDRIVE",
"HOMEPATH",
"LOCALAPPDATA",
"PATH",
"PROCESSOR_ARCHITECTURE",
"SYSTEMDRIVE",
"SYSTEMROOT",
"TEMP",
"USERNAME",
"USERPROFILE",
]
: ["HOME", "LOGNAME", "PATH", "SHELL", "TERM", "USER"];
const env: Record<string, string> = {};
for (const key of DEFAULT_INHERITED_ENV_VARS) {
const value = process.env[key];
if (value === undefined) {
continue;
}
if (value.startsWith("()")) {
// Skip functions, which are a security risk.
continue;
}
env[key] = value;
}
return env;
}
export class MCPAuthenticationClient { export class MCPAuthenticationClient {
private serverUrlHash: string; private serverUrlHash: string;
private authProvider: NodeOAuthClientProvider | null = null; private authProvider: NodeOAuthClientProvider | null = null;
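The route added earlier in this commit does not exercise the `headerMapping` option; a hypothetical call that forwards a request header into the child process environment (the command, args, and header name are illustrative):

```ts
// Hypothetical usage of createMCPStdioProxy with a custom header-to-env mapping.
import { createMCPStdioProxy } from "@core/mcp-proxy";

export async function action({ request }: { request: Request }) {
  return createMCPStdioProxy(request, "npx", ["-y", "some-stdio-mcp-server"], {
    // Reuse the caller's session so the same child process handles follow-ups.
    sessionId: request.headers.get("mcp-session-id") ?? undefined,
    // Forward the caller's X-Api-Key header to the child as API_KEY.
    headerMapping: { "X-Api-Key": "API_KEY" },
    debug: false,
  });
}
```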

View File

@ -5,6 +5,7 @@ export * from "./types/index.js";
export { export {
createMCPAuthClient, createMCPAuthClient,
createMCPProxy, createMCPProxy,
createMCPStdioProxy,
MCPAuthenticationClient, MCPAuthenticationClient,
} from "./core/mcp-remote-client.js"; } from "./core/mcp-remote-client.js";

View File

@ -51,7 +51,7 @@ export interface ProxyConnectionConfig {
/** /**
* Transport strategy options * Transport strategy options
*/ */
export type TransportStrategy = "sse-only" | "http-only" | "sse-first" | "http-first"; export type TransportStrategy = "sse-only" | "http-only" | "sse-first" | "http-first" | "stdio";
/** /**
* Static OAuth client metadata * Static OAuth client metadata

View File

@ -23,7 +23,7 @@ export function createMCPTransportBridge(
// Forward messages from client to server // Forward messages from client to server
clientTransport.onmessage = (message: any, extra: any) => { clientTransport.onmessage = (message: any, extra: any) => {
console.log(JSON.stringify(message)); console.log(message);
log("[Client→Server]", message.method || message.id); log("[Client→Server]", message.method || message.id);
onMessage?.("client-to-server", message); onMessage?.("client-to-server", message);
@ -41,6 +41,7 @@ export function createMCPTransportBridge(
// Forward messages from server to client // Forward messages from server to client
serverTransport.onmessage = (message: any, extra: any) => { serverTransport.onmessage = (message: any, extra: any) => {
console.log(message);
console.log(JSON.stringify(message), JSON.stringify(extra)); console.log(JSON.stringify(message), JSON.stringify(extra));
log("[Server→Client]", message.method || message.id); log("[Server→Client]", message.method || message.id);
onMessage?.("server-to-client", message); onMessage?.("server-to-client", message);

View File

@ -34,13 +34,24 @@ export class RemixMCPTransport implements Transport {
throw new Error("Invalid JSON-RPC message"); throw new Error("Invalid JSON-RPC message");
} }
// Emit the message to handler if (message.method.includes("notifications")) {
if (this.onmessage) { this.send({});
try { return;
this.onmessage(message); }
} catch (error) {
if (this.onerror) { console.log(message, "message");
this.onerror(error as Error);
if (Object.keys(message).length === 0) {
this.send({});
} else {
// Emit the message to handler
if (this.onmessage) {
try {
this.onmessage(message);
} catch (error) {
if (this.onerror) {
this.onerror(error as Error);
}
} }
} }
} }

pnpm-lock.yaml (generated, 11 changed lines)
View File

@ -568,6 +568,9 @@ importers:
dotenv: dotenv:
specifier: ^16.4.5 specifier: ^16.4.5
version: 16.5.0 version: 16.5.0
dotenv-expand:
specifier: ^12.0.2
version: 12.0.2
esbuild: esbuild:
specifier: ^0.23.0 specifier: ^0.23.0
version: 0.23.1 version: 0.23.1
@ -6315,6 +6318,10 @@ packages:
resolution: {integrity: sha512-GopVGCpVS1UKH75VKHGuQFqS1Gusej0z4FyQkPdwjil2gNIv+LNsqBlboOzpJFZKVT95GkCyWJbBSdFEFUWI2A==} resolution: {integrity: sha512-GopVGCpVS1UKH75VKHGuQFqS1Gusej0z4FyQkPdwjil2gNIv+LNsqBlboOzpJFZKVT95GkCyWJbBSdFEFUWI2A==}
engines: {node: '>=12'} engines: {node: '>=12'}
dotenv-expand@12.0.2:
resolution: {integrity: sha512-lXpXz2ZE1cea1gL4sz2Ipj8y4PiVjytYr3Ij0SWoms1PGxIv7m2CRKuRuCRtHdVuvM/hNJPMxt5PbhboNC4dPQ==}
engines: {node: '>=12'}
dotenv@16.0.3: dotenv@16.0.3:
resolution: {integrity: sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ==} resolution: {integrity: sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ==}
engines: {node: '>=12'} engines: {node: '>=12'}
@ -17106,6 +17113,10 @@ snapshots:
dotenv-expand@10.0.0: {} dotenv-expand@10.0.0: {}
dotenv-expand@12.0.2:
dependencies:
dotenv: 16.5.0
dotenv@16.0.3: {} dotenv@16.0.3: {}
dotenv@16.4.7: {} dotenv@16.4.7: {}

View File

@ -31,7 +31,7 @@ POSTGRES_USER=docker
POSTGRES_PASSWORD=docker POSTGRES_PASSWORD=docker
TRIGGER_DB=trigger TRIGGER_DB=trigger
DB_HOST=localhost DB_HOST=host.docker.internal
DB_PORT=5432 DB_PORT=5432
DB_SCHEMA=sigma DB_SCHEMA=sigma

trigger/auth.htpasswd (new file, 1 line)
View File

@ -0,0 +1 @@
registry-user:$2y$05$6ingYqw0.3j13dxHY4w3neMSvKhF3pvRmc0AFifScWsVA9JpuLwNK

View File

@ -146,7 +146,7 @@ services:
- webapp - webapp
volumes: volumes:
# registry-user:very-secure-indeed # registry-user:very-secure-indeed
- ../registry/auth.htpasswd:/auth/htpasswd:ro - ./auth.htpasswd:/auth/htpasswd:ro
environment: environment:
REGISTRY_AUTH: htpasswd REGISTRY_AUTH: htpasswd
REGISTRY_AUTH_HTPASSWD_REALM: Registry Realm REGISTRY_AUTH_HTPASSWD_REALM: Registry Realm

View File

@ -1,6 +1,5 @@
{ {
"$schema": "https://turborepo.com/schema.json", "$schema": "https://turborepo.com/schema.json",
"ui": "tui",
"tasks": { "tasks": {
"build": { "build": {
"dependsOn": [ "^build" ], "dependsOn": [ "^build" ],
@ -52,6 +51,7 @@
"SESSION_SECRET", "SESSION_SECRET",
"APP_ORIGIN", "APP_ORIGIN",
"LOGIN_ORIGIN", "LOGIN_ORIGIN",
"API_BASE_URL",
"POSTHOG_PROJECT_KEY", "POSTHOG_PROJECT_KEY",
"AUTH_GOOGLE_CLIENT_ID", "AUTH_GOOGLE_CLIENT_ID",
"AUTH_GOOGLE_CLIENT_SECRET", "AUTH_GOOGLE_CLIENT_SECRET",